diff --git a/00_simple_chat_interface.py b/00_simple_chat_interface.py
deleted file mode 100644
index 940d56a..0000000
--- a/00_simple_chat_interface.py
+++ /dev/null
@@ -1,64 +0,0 @@
-from typing import List, Union, Generator, Iterator
-from pydantic import BaseModel
-import logging
-import os
-import requests
-import json
-
-logging.basicConfig(level=logging.DEBUG)
-
-class Pipeline:
-    class Valves(BaseModel):
-        OR_MODEL: str
-        OR_URL: str
-        OR_KEY: str
-
-    def __init__(self):
-        self.name = "Basic Openrouter Chat"
-        self.valves = self.Valves(
-            **{
-                "pipelines": ["*"],
-                "OR_MODEL": os.getenv("OR_MODEL", "anthropic/claude-3.5-haiku:beta"),
-                "OR_URL": os.getenv("OR_URL", "https://openrouter.ai/api/v1/chat/completions"),
-                "OR_KEY": os.getenv("OR_KEY", "OPENROUTER_API_KEY")
-            }
-        )
-
-    async def on_startup(self):
-        pass
-
-    async def on_shutdown(self):
-        pass
-
-    def pipe(self, user_message: str, model_id: str, messages: List[dict], body: dict) -> Union[str, Generator, Iterator]:
-        try:
-            response = requests.post(
-                url = self.valves.OR_URL,
-                headers = {
-                    "Authorization": f"Bearer {self.valves.OR_KEY}"
-                },
-
-                data = json.dumps({
-                    "model": self.valves.OR_MODEL,
-                    "messages": [
-                        {
-                            "role": "user",
-                            "content": user_message
-                        }
-                    ]
-                })
-            )
-
-            if response.status_code == 200:
-                response_data = response.json()
-
-                return response_data["choices"][0]["message"]["content"]
-            else:
-                logging.error(f"Error response {response.status_code}: {response.text}")
-                return f"Error: {response.status_code}"
-        except requests.HTTPError as e:
-            logging.error(f"Clientresponse error: {e}")
-            return "HTTP backend error"
-        except Exception as e:
-            logging.error(f"Unexpected error: {e}")
-            return f"Unexpected error: {e}"
\ No newline at end of file
diff --git a/01_pipeline_tester.py b/01_pipeline_tester.py
deleted file mode 100644
index 9eb9b98..0000000
--- a/01_pipeline_tester.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import List, Union, Generator, Iterator
-
-class Pipeline:
-    def __init__(self):
-        self.name = "Simple pipeline tester"
-        pass
-
-    async def on_startup(self):
-        # This function is called when the server is started.
-        print(f"on_startup:{__name__}")
-        pass
-
-    async def on_shutdown(self):
-        # This function is called when the server is shutdown.
-        print(f"on_shutdown:{__name__}")
-        pass
-
-
-    def pipe(self, user_message: str, model_id: str, messages: List[dict], body: dict) -> Union[str, Generator, Iterator]:
-        # This function is called when a new user_message is receieved.
-
-        print(f"received message from user: {user_message}") #user_message to logs
-        return (f"received message from user: {user_message}") #user_message to the UI
\ No newline at end of file
diff --git a/02_oms_sql_pipeline.py b/02_oms_sql_pipeline.py
index 3f69eb9..a80e386 100644
--- a/02_oms_sql_pipeline.py
+++ b/02_oms_sql_pipeline.py
@@ -2,6 +2,7 @@ from typing import List, Union, Generator, Iterator, Dict
 from pydantic import BaseModel
 from sqlalchemy import create_engine
 from sqlalchemy import text
+import pylightxl as xl
 import logging
 import os
 import requests
@@ -22,6 +23,8 @@ class Pipeline:
         DB_DATABASE: str
         DB_TABLES: List[str]
 
+        XL_TEMPLATE_PATH: str
+
     def __init__(self):
         self.name = "ØMS Membership Database"
         self.engine = None
@@ -38,6 +41,7 @@ class Pipeline:
                 "DB_PASSWORD": os.getenv("DB_PASSWORD", "YOUR_PASSWORD"),
                 "DB_DATABASE": os.getenv("DB_DATABASE", "pp_polarpress_demo_prod"),
                 "DB_TABLES": ["users", "club_memberships", "stripe_transactions", "vipps_transactions"],
+                "XL_TEMPLATE_PATH": os.getenv("XL_TEMPLATE_PATH", "/var/support/openwebui/xl_templates/oms_dataexport.xlsx")
             }
         )
 
diff --git a/README.md b/README.md
index a4febaf..3cba97a 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,17 @@
-# Example pipeline for Openweb UI
+# Østfold Milsim LLM RAG
 
-These python scripts are meant as a pipeline between Openweb UI and Openrouter
+This is a comprehensive script that uses database (MariaDB) querying to
+provide retrieval-augmented generation (RAG) functionality for the language
+model accessed through the OpenRouter middleware. All configuration values
+can be edited through Open WebUI's valve interface.
 
-1. 00: Queries a openrouter model and returns the response to the chat
-2. 01: Simple Echo example
\ No newline at end of file
+The language model is first asked to provide a syntactically correct query
+based on the question the user has asked. The pipeline then runs that query
+and formats the resulting Python tuples into a presentable Markdown form.
+
+Since the response token window of any LLM is limited when it comes to
+retrieving large results from a database, another layer is also introduced.
+The results of the database query are also exported to a Microsoft Excel
+file, uploaded to a cloud provider (Seafile, but any cloud provider can be
+used through a RESTful API) and shared. The share link is included in the
+response, so the full dataset can be reviewed.
\ No newline at end of file
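
Below is a minimal sketch of the "query results to Markdown" step the new README describes, assuming the results arrive as plain column names plus row tuples from SQLAlchemy; the helper name `rows_to_markdown` and its signature are illustrative and not taken from 02_oms_sql_pipeline.py.

```python
# Hypothetical helper: turn column names and SQL result tuples into a
# Markdown table, the "presentable Markdown form" the README mentions.
from typing import Sequence, Tuple


def rows_to_markdown(columns: Sequence[str], rows: Sequence[Tuple]) -> str:
    header = "| " + " | ".join(columns) + " |"
    divider = "| " + " | ".join("---" for _ in columns) + " |"
    body = ["| " + " | ".join(str(value) for value in row) + " |" for row in rows]
    return "\n".join([header, divider, *body])


# Example:
# rows_to_markdown(["id", "name"], [(1, "Alice"), (2, "Bob")])
# | id | name |
# | --- | --- |
# | 1 | Alice |
# | 2 | Bob |
```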
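And a rough sketch of the Excel export and share-link layer, assuming pylightxl (which the diff imports) and a generic token-authenticated upload endpoint; `upload_url`, the `Token` auth header, and the `share_link` response field are placeholder assumptions, not the pipeline's actual Seafile integration.

```python
# Hypothetical sketch: fill the workbook at XL_TEMPLATE_PATH with query
# results using pylightxl, upload the file over HTTP, and return the link.
import pylightxl as xl
import requests


def export_and_share(columns, rows, template_path, upload_url, api_token):
    db = xl.readxl(fn=template_path)        # load the configured workbook
    ws = db.ws_names[0]                     # write into its first worksheet
    for col, name in enumerate(columns, start=1):
        db.ws(ws=ws).update_index(row=1, col=col, val=name)
    for row_idx, row in enumerate(rows, start=2):
        for col, value in enumerate(row, start=1):
            db.ws(ws=ws).update_index(row=row_idx, col=col, val=value)
    out_path = "/tmp/oms_dataexport.xlsx"
    xl.writexl(db=db, fn=out_path)

    # Placeholder upload call; the real endpoint and response shape depend on
    # the cloud provider's REST API (Seafile in this setup).
    with open(out_path, "rb") as handle:
        response = requests.post(
            upload_url,
            headers={"Authorization": f"Token {api_token}"},
            files={"file": handle},
        )
    response.raise_for_status()
    return response.json().get("share_link", "")
```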