Updated readme file and added pylightxl import for Excel manipulation

Helge-Mikael Nordgård 2025-02-01 18:41:42 +01:00
parent 5303d7829d
commit c94fa528fe
4 changed files with 19 additions and 91 deletions

View File

@@ -1,64 +0,0 @@
from typing import List, Union, Generator, Iterator
from pydantic import BaseModel
import logging
import os
import requests
import json

logging.basicConfig(level=logging.DEBUG)


class Pipeline:
    class Valves(BaseModel):
        OR_MODEL: str
        OR_URL: str
        OR_KEY: str

    def __init__(self):
        self.name = "Basic Openrouter Chat"
        self.valves = self.Valves(
            **{
                "pipelines": ["*"],
                "OR_MODEL": os.getenv("OR_MODEL", "anthropic/claude-3.5-haiku:beta"),
                "OR_URL": os.getenv("OR_URL", "https://openrouter.ai/api/v1/chat/completions"),
                "OR_KEY": os.getenv("OR_KEY", "OPENROUTER_API_KEY"),
            }
        )

    async def on_startup(self):
        pass

    async def on_shutdown(self):
        pass

    def pipe(self, user_message: str, model_id: str, messages: List[dict], body: dict) -> Union[str, Generator, Iterator]:
        try:
            response = requests.post(
                url=self.valves.OR_URL,
                headers={"Authorization": f"Bearer {self.valves.OR_KEY}"},
                data=json.dumps({
                    "model": self.valves.OR_MODEL,
                    "messages": [{"role": "user", "content": user_message}],
                }),
            )
            if response.status_code == 200:
                response_data = response.json()
                return response_data["choices"][0]["message"]["content"]
            else:
                logging.error(f"Error response {response.status_code}: {response.text}")
                return f"Error: {response.status_code}"
        except requests.HTTPError as e:
            logging.error(f"Client response error: {e}")
            return "HTTP backend error"
        except Exception as e:
            logging.error(f"Unexpected error: {e}")
            return f"Unexpected error: {e}"

View File

@@ -1,23 +0,0 @@
from typing import List, Union, Generator, Iterator


class Pipeline:
    def __init__(self):
        self.name = "Simple pipeline tester"

    async def on_startup(self):
        # This function is called when the server is started.
        print(f"on_startup:{__name__}")

    async def on_shutdown(self):
        # This function is called when the server is shut down.
        print(f"on_shutdown:{__name__}")

    def pipe(self, user_message: str, model_id: str, messages: List[dict], body: dict) -> Union[str, Generator, Iterator]:
        # This function is called when a new user_message is received.
        print(f"received message from user: {user_message}")  # user_message to the logs
        return f"received message from user: {user_message}"  # user_message to the UI

View File

@@ -2,6 +2,7 @@ from typing import List, Union, Generator, Iterator, Dict
 from pydantic import BaseModel
 from sqlalchemy import create_engine
 from sqlalchemy import text
+import pylightxl as xl
 import logging
 import os
 import requests
@@ -22,6 +23,8 @@ class Pipeline:
         DB_DATABASE: str
         DB_TABLES: List[str]
+        XL_TEMPLATE_PATH: str
+
     def __init__(self):
         self.name = "ØMS Membership Database"
         self.engine = None
@@ -38,6 +41,7 @@ class Pipeline:
                 "DB_PASSWORD": os.getenv("DB_PASSWORD", "YOUR_PASSWORD"),
                 "DB_DATABASE": os.getenv("DB_DATABASE", "pp_polarpress_demo_prod"),
                 "DB_TABLES": ["users", "club_memberships", "stripe_transactions", "vipps_transactions"],
+                "XL_TEMPLATE_PATH": os.getenv("XL_TEMPLATE_PATH", "/var/support/openwebui/xl_templates/oms_dataexport.xlsx")
             }
         )
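
Note: this hunk only adds the pylightxl import and the XL_TEMPLATE_PATH valve; the export logic itself is not part of the diff. Below is a minimal sketch of how the template could be filled with query results, assuming a hypothetical helper named export_to_excel and a sheet called "Sheet1" (neither appears in the commit).

import pylightxl as xl

def export_to_excel(template_path: str, output_path: str, headers: list, rows: list) -> str:
    # Load the pre-made workbook referenced by the XL_TEMPLATE_PATH valve.
    db = xl.readxl(fn=template_path)
    ws = db.ws(ws="Sheet1")  # sheet name is an assumption, not taken from the diff
    # Write the column headers on row 1 and the query result rows below them.
    for col, header in enumerate(headers, start=1):
        ws.update_index(row=1, col=col, val=header)
    for r, row in enumerate(rows, start=2):
        for col, value in enumerate(row, start=1):
            ws.update_index(row=r, col=col, val=value)
    # Save a filled-in copy of the template for upload to the cloud provider.
    xl.writexl(db=db, fn=output_path)
    return output_path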

View File

@@ -1,6 +1,17 @@
-# Example pipeline for Openweb UI
-These python scripts are meant as a pipeline between Openweb UI and Openrouter
-
-1. 00: Queries an OpenRouter model and returns the response to the chat
-2. 01: Simple Echo example
+# Østfold Milsim LLM RAG
+
+This is a comprehensive script that uses database (MariaDB) querying to
+provide retrieval augmented generation functionality for the language model
+accessed through OpenRouter middleware. All configuration values can be edited
+through Open WebUI's valve interface.
+
+The language model is first asked to provide a syntactically correct query
+based on the question the user has asked. It then runs that query and formats
+the results from a Python tuple data set into a presentable markdown form.
+Since the response token window of any LLM is limited when it comes to
+retrieving large results from a database, another layer is also introduced:
+the results of the database query are also exported to a Microsoft Excel
+file, uploaded to a cloud provider (Seafile, but any cloud provider can be
+used through a RESTful API interface) and shared. The share link is included
+in the response, so the full dataset can be reviewed.
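
The markdown formatting step described in the readme is not shown on this page. Below is a minimal sketch of turning the tuple rows returned by a SQLAlchemy query into a markdown table; the function name rows_to_markdown and its parameters are illustrative assumptions, not taken from the repository.

def rows_to_markdown(columns: list, rows: list) -> str:
    # Build a markdown table: header row, divider, then one line per result row.
    header = "| " + " | ".join(columns) + " |"
    divider = "| " + " | ".join("---" for _ in columns) + " |"
    body = ["| " + " | ".join(str(v) for v in row) + " |" for row in rows]
    return "\n".join([header, divider, *body])

# Example with hypothetical data:
# rows_to_markdown(["id", "name"], [(1, "Ola"), (2, "Kari")]) produces:
# | id | name |
# | --- | --- |
# | 1 | Ola |
# | 2 | Kari |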