Compare commits
22 Commits
ui_overhau
...
tool_use
Author | SHA1 | Date | |
---|---|---|---|
eda6cdbdf0 | |||
23fa40723a | |||
b862358902 | |||
1833420460 | |||
131bb6d60f | |||
1de9ba68ac | |||
58d63c27ae | |||
6d0134c34d | |||
f56d9b59c0 | |||
0c9059bcbc | |||
434a6ea6fc | |||
c8e2d46013 | |||
bbc4bd9115 | |||
5111ae6a7b | |||
a888e84079 | |||
d90d4fe340 | |||
26514e9fdd | |||
9d25dd7d94 | |||
b56619a2e6 | |||
90e0b4ff7f | |||
47059dabdc | |||
d050549dd8 |
20
.gitea/workflows/datadog-sca.yml
Normal file
20
.gitea/workflows/datadog-sca.yml
Normal file
@ -0,0 +1,20 @@
|
||||
on: [push]
|
||||
|
||||
name: Datadog Software Composition Analysis
|
||||
|
||||
jobs:
|
||||
software-composition-analysis:
|
||||
runs-on: ubuntu-latest
|
||||
name: Datadog SBOM Generation and Upload
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Check imported libraries are secure and compliant
|
||||
id: datadog-software-composition-analysis
|
||||
uses: DataDog/datadog-sca-github-action@main
|
||||
with:
|
||||
dd_api_key: ${{ secrets.DD_API_KEY }}
|
||||
dd_app_key: ${{ secrets.DD_APP_KEY }}
|
||||
dd_service: jarvis
|
||||
dd_env: ci
|
||||
dd_site: us5.datadoghq.com
|
21
.gitea/workflows/datadog-static-analysis.yml
Normal file
21
.gitea/workflows/datadog-static-analysis.yml
Normal file
@ -0,0 +1,21 @@
|
||||
on: [push]
|
||||
|
||||
name: Datadog Static Analysis
|
||||
|
||||
jobs:
|
||||
static-analysis:
|
||||
runs-on: ubuntu-latest
|
||||
name: Datadog Static Analyzer
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Check code meets quality and security standards
|
||||
id: datadog-static-analysis
|
||||
uses: DataDog/datadog-static-analyzer-github-action@v1
|
||||
with:
|
||||
dd_api_key: ${{ secrets.DD_API_KEY }}
|
||||
dd_app_key: ${{ secrets.DD_APP_KEY }}
|
||||
dd_service: jarvis
|
||||
dd_env: ci
|
||||
dd_site: us5.datadoghq.com
|
||||
cpu_count: 2
|
20
.github/workflows/datadog-sca.yml
vendored
Normal file
20
.github/workflows/datadog-sca.yml
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
on: [push]
|
||||
|
||||
name: Datadog Software Composition Analysis
|
||||
|
||||
jobs:
|
||||
software-composition-analysis:
|
||||
runs-on: ubuntu-latest
|
||||
name: Datadog SBOM Generation and Upload
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Check imported libraries are secure and compliant
|
||||
id: datadog-software-composition-analysis
|
||||
uses: DataDog/datadog-sca-github-action@main
|
||||
with:
|
||||
dd_api_key: ${{ secrets.DD_API_KEY }}
|
||||
dd_app_key: ${{ secrets.DD_APP_KEY }}
|
||||
dd_service: jarvis
|
||||
dd_env: ci
|
||||
dd_site: us5.datadoghq.com
|
21
.github/workflows/datadog-static-analysis.yml
vendored
Normal file
21
.github/workflows/datadog-static-analysis.yml
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
on: [push]
|
||||
|
||||
name: Datadog Static Analysis
|
||||
|
||||
jobs:
|
||||
static-analysis:
|
||||
runs-on: ubuntu-latest
|
||||
name: Datadog Static Analyzer
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Check code meets quality and security standards
|
||||
id: datadog-static-analysis
|
||||
uses: DataDog/datadog-static-analyzer-github-action@v1
|
||||
with:
|
||||
dd_api_key: ${{ secrets.DD_API_KEY }}
|
||||
dd_app_key: ${{ secrets.DD_APP_KEY }}
|
||||
dd_service: jarvis
|
||||
dd_env: ci
|
||||
dd_site: us5.datadoghq.com
|
||||
cpu_count: 2
|
43
.gitignore
vendored
43
.gitignore
vendored
@ -174,3 +174,46 @@ cython_debug/
|
||||
pyvenv.cfg
|
||||
.venv
|
||||
pip-selfcheck.json
|
||||
|
||||
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules
|
||||
jspm_packages
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
.next
|
||||
|
||||
config.ini
|
||||
*.db
|
20
Dockerfile
Normal file
20
Dockerfile
Normal file
@ -0,0 +1,20 @@
|
||||
# Use an official Python runtime as a parent image
|
||||
FROM python:3.9-slim
|
||||
|
||||
# Set the working directory in the container
|
||||
WORKDIR /app
|
||||
|
||||
# Copy the current directory contents into the container at /app
|
||||
COPY . /app
|
||||
|
||||
# Install any needed packages specified in requirements.txt
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
# Make port 5001 available to the world outside this container
|
||||
EXPOSE 5001
|
||||
|
||||
# Define environment variable
|
||||
ENV FLASK_APP=main.py
|
||||
|
||||
# Run app.py when the container launches
|
||||
CMD ["python", "main.py"]
|
138
client.py
Normal file
138
client.py
Normal file
@ -0,0 +1,138 @@
|
||||
import time
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
class LLMChatClient:
|
||||
def __init__(self, base_url, api_key):
|
||||
self.base_url = base_url.rstrip("/")
|
||||
self.api_key = api_key
|
||||
self.headers = {"X-API-Key": api_key, "Content-Type": "application/json"}
|
||||
|
||||
def submit_query(self, message):
|
||||
"""
|
||||
Submit a query to the LLM Chat Server.
|
||||
|
||||
Args:
|
||||
message (str): The message to send to the server.
|
||||
|
||||
Returns:
|
||||
str: The query ID for the submitted query.
|
||||
|
||||
Raises:
|
||||
requests.RequestException: If the request fails.
|
||||
|
||||
Example:
|
||||
client = LLMChatClient('http://localhost:5001', 'your-api-key')
|
||||
query_id = client.submit_query('What is the capital of France?')
|
||||
print(f"Query ID: {query_id}")
|
||||
|
||||
cURL equivalent:
|
||||
curl -X POST http://localhost:5001/api/v1/query \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-API-Key: your-api-key" \
|
||||
-d '{"message": "What is the capital of France?"}'
|
||||
"""
|
||||
url = f"{self.base_url}/api/v1/query"
|
||||
data = {"message": message}
|
||||
response = requests.post(url, json=data, headers=self.headers)
|
||||
response.raise_for_status()
|
||||
return response.json()["query_id"]
|
||||
|
||||
def get_query_status(self, query_id):
|
||||
"""
|
||||
Get the status of a submitted query.
|
||||
|
||||
Args:
|
||||
query_id (str): The ID of the query to check.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the status and conversation history (if completed).
|
||||
|
||||
Raises:
|
||||
requests.RequestException: If the request fails.
|
||||
|
||||
Example:
|
||||
client = LLMChatClient('http://localhost:5001', 'your-api-key')
|
||||
status = client.get_query_status('query-id-here')
|
||||
print(f"Query status: {status['status']}")
|
||||
if status['status'] == 'completed':
|
||||
print(f"Conversation history: {status['conversation_history']}")
|
||||
|
||||
cURL equivalent:
|
||||
curl -X GET http://localhost:5001/api/v1/query_status/query-id-here \
|
||||
-H "X-API-Key: your-api-key"
|
||||
"""
|
||||
url = f"{self.base_url}/api/v1/query_status/{query_id}"
|
||||
response = requests.get(url, headers=self.headers)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
def submit_query_and_wait(self, message, max_wait_time=300, poll_interval=2):
|
||||
"""
|
||||
Submit a query and wait for the result.
|
||||
|
||||
Args:
|
||||
message (str): The message to send to the server.
|
||||
max_wait_time (int): Maximum time to wait for the result in seconds.
|
||||
poll_interval (int): Time between status checks in seconds.
|
||||
|
||||
Returns:
|
||||
dict: The completed conversation history.
|
||||
|
||||
Raises:
|
||||
requests.RequestException: If the request fails.
|
||||
TimeoutError: If the query doesn't complete within max_wait_time.
|
||||
|
||||
Example:
|
||||
client = LLMChatClient('http://localhost:5001', 'your-api-key')
|
||||
result = client.submit_query_and_wait('What is the capital of France?')
|
||||
print(f"Conversation history: {result}")
|
||||
"""
|
||||
query_id = self.submit_query(message)
|
||||
start_time = time.time()
|
||||
|
||||
while time.time() - start_time < max_wait_time:
|
||||
status = self.get_query_status(query_id)
|
||||
if status["status"] == "completed":
|
||||
return status["conversation_history"]
|
||||
time.sleep(poll_interval)
|
||||
|
||||
raise TimeoutError(f"Query did not complete within {max_wait_time} seconds")
|
||||
|
||||
|
||||
class LLMChatAdminClient:
|
||||
def __init__(self, base_url, admin_key):
|
||||
self.base_url = base_url.rstrip("/")
|
||||
self.admin_key = admin_key
|
||||
self.headers = {"X-Admin-Key": admin_key, "Content-Type": "application/json"}
|
||||
|
||||
def generate_api_key(self, username):
|
||||
"""
|
||||
Generate a new API key for a user.
|
||||
|
||||
Args:
|
||||
username (str): The username to generate the API key for.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the username and generated API key.
|
||||
|
||||
Raises:
|
||||
requests.RequestException: If the request fails.
|
||||
|
||||
Example:
|
||||
admin_client = LLMChatAdminClient('http://localhost:5001', 'your-admin-key')
|
||||
result = admin_client.generate_api_key('new_user')
|
||||
print(f"Generated API key for {result['username']}: {result['api_key']}")
|
||||
|
||||
cURL equivalent:
|
||||
curl -X POST http://localhost:5001/admin/generate_key \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Admin-Key: your-admin-key" \
|
||||
-d '{"username": "new_user"}'
|
||||
"""
|
||||
url = f"{self.base_url}/admin/generate_key"
|
||||
data = {"username": username}
|
||||
response = requests.post(url, json=data, headers=self.headers)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
16
docker-compose.yml
Normal file
16
docker-compose.yml
Normal file
@ -0,0 +1,16 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
llm-chat-server:
|
||||
build: .
|
||||
ports:
|
||||
- "5001:5001"
|
||||
volumes:
|
||||
- ./llm_chat_server.db:/app/llm_chat_server.db
|
||||
- ./config.ini:/app/config.ini
|
||||
environment:
|
||||
- FLASK_ENV=production
|
||||
restart: unless-stopped
|
||||
|
||||
volumes:
|
||||
llm_chat_server_db:
|
777
index.html
777
index.html
@ -6,63 +6,110 @@
|
||||
<title>DWS Intelligence</title>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/moment@2.29.4/moment.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-moment@1.0.1/dist/chartjs-adapter-moment.min.js"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/highlight.min.js"></script>
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.7.0/styles/default.min.css">
|
||||
<link href="https://fonts.googleapis.com/css2?family=Noto+Sans+Mono:wght@400;700&display=swap" rel="stylesheet">
|
||||
<style>
|
||||
body {
|
||||
font-family: Arial, sans-serif;
|
||||
max-width: 800px;
|
||||
margin: 0 auto;
|
||||
padding: 20px;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
background-color: #000;
|
||||
color: #fff;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100vh;
|
||||
overflow: hidden;
|
||||
}
|
||||
#chat-container {
|
||||
border: 1px solid #ccc;
|
||||
height: 400px;
|
||||
border: 2px solid #444;
|
||||
flex: 1;
|
||||
overflow-y: auto;
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
background-color: #111;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#input-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
padding: 10px;
|
||||
background-color: #222;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#user-input {
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
background-color: #000;
|
||||
color: #fff;
|
||||
border: 1px solid #444;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
font-size: 16px;
|
||||
margin-bottom: 10px;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
#send-button {
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
background-color: #4CAF50;
|
||||
color: white;
|
||||
background-color: #444;
|
||||
color: #fff;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
font-size: 16px;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
.message {
|
||||
margin-bottom: 10px;
|
||||
font-size: 16px;
|
||||
}
|
||||
.user-message {
|
||||
text-align: right;
|
||||
color: blue;
|
||||
color: #0ff;
|
||||
}
|
||||
.bot-message {
|
||||
text-align: left;
|
||||
color: green;
|
||||
color: #fff;
|
||||
}
|
||||
.bot-message pre {
|
||||
background-color: #222;
|
||||
padding: 10px;
|
||||
border-radius: 5px;
|
||||
overflow-x: auto;
|
||||
}
|
||||
.bot-message code {
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
font-size: 14px;
|
||||
}
|
||||
.thinking {
|
||||
font-style: italic;
|
||||
color: #888;
|
||||
}
|
||||
.thought-summary {
|
||||
cursor: pointer;
|
||||
color: #888;
|
||||
margin-bottom: 5px;
|
||||
font-weight: bold;
|
||||
margin-bottom: 5px;
|
||||
padding: 5px;
|
||||
border-radius: 3px;
|
||||
}
|
||||
.thought-summary.plan { background-color: #2c3e50; }
|
||||
.thought-summary.decision { background-color: #34495e; }
|
||||
.thought-summary.tool_call { background-color: #16a085; }
|
||||
.thought-summary.tool_result { background-color: #27ae60; }
|
||||
.thought-summary.think_more { background-color: #2980b9; }
|
||||
.thought-summary.answer { background-color: #8e44ad; }
|
||||
.thought-summary.reply { background-color: #f39c12; }
|
||||
.thought-summary.thoughts { background-color: #f39c12; }
|
||||
.thought-details {
|
||||
display: none;
|
||||
margin-left: 20px;
|
||||
border-left: 2px solid #ccc;
|
||||
border-left: 2px solid #444;
|
||||
padding-left: 10px;
|
||||
margin-bottom: 10px;
|
||||
white-space: pre-wrap;
|
||||
font-family: monospace;
|
||||
background-color: #f0f0f0;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
background-color: #222;
|
||||
}
|
||||
.collapsible::before {
|
||||
content: '▶ ';
|
||||
@ -72,23 +119,355 @@
|
||||
.collapsible.open::before {
|
||||
transform: rotate(90deg);
|
||||
}
|
||||
.led {
|
||||
width: 10px;
|
||||
height: 10px;
|
||||
border-radius: 50%;
|
||||
background-color: #f00;
|
||||
margin-right: 10px;
|
||||
position: relative;
|
||||
}
|
||||
.led::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: -5px;
|
||||
left: -5px;
|
||||
right: -5px;
|
||||
bottom: -5px;
|
||||
background-color: #f00;
|
||||
border-radius: 50%;
|
||||
filter: blur(5px);
|
||||
opacity: 0;
|
||||
transition: opacity 0.5s ease-in-out;
|
||||
}
|
||||
.led.blinking {
|
||||
animation: blink 1s step-start infinite;
|
||||
}
|
||||
.led.blinking::after {
|
||||
animation: glow 1s ease-in-out infinite alternate;
|
||||
}
|
||||
@keyframes blink {
|
||||
50% {
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
@keyframes glow {
|
||||
0% {
|
||||
opacity: 0;
|
||||
}
|
||||
100% {
|
||||
opacity: 0.5;
|
||||
}
|
||||
}
|
||||
/* PDP-11 inspired styles */
|
||||
#chat-container::-webkit-scrollbar {
|
||||
width: 12px;
|
||||
}
|
||||
#chat-container::-webkit-scrollbar-track {
|
||||
background: #222;
|
||||
}
|
||||
#chat-container::-webkit-scrollbar-thumb {
|
||||
background-color: #444;
|
||||
border-radius: 6px;
|
||||
border: 3px solid #222;
|
||||
}
|
||||
.pdp-panel {
|
||||
background-color: #333;
|
||||
border: 2px solid #555;
|
||||
border-radius: 5px;
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.pdp-label {
|
||||
font-size: 14px;
|
||||
color: #888;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
#main-container {
|
||||
display: flex;
|
||||
height: 100vh;
|
||||
}
|
||||
|
||||
#chat-area {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
#sidebar {
|
||||
width: 300px;
|
||||
background-color: #222;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
overflow-y: auto;
|
||||
transition: transform 0.3s ease-in-out;
|
||||
}
|
||||
|
||||
#sidebar.collapsed {
|
||||
transform: translateX(100%);
|
||||
}
|
||||
|
||||
#sidebar-toggle {
|
||||
position: fixed;
|
||||
top: 10px;
|
||||
right: 10px;
|
||||
z-index: 1000;
|
||||
background-color: #444;
|
||||
color: #fff;
|
||||
border: none;
|
||||
padding: 5px 10px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.graph-container {
|
||||
margin-bottom: 20px;
|
||||
height: 150px;
|
||||
}
|
||||
|
||||
.graph-title {
|
||||
color: #888;
|
||||
font-size: 14px;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
#sidebar {
|
||||
position: fixed;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
max-width: 300px;
|
||||
transform: translateX(100%);
|
||||
}
|
||||
|
||||
#sidebar.collapsed {
|
||||
transform: translateX(0);
|
||||
}
|
||||
}
|
||||
|
||||
.conversation-history-container {
|
||||
margin-top: 20px;
|
||||
background-color: #222;
|
||||
border-radius: 5px;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
#conversation-history {
|
||||
color: #fff;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
font-size: 12px;
|
||||
}
|
||||
.history-card {
|
||||
background-color: #2c3e50;
|
||||
border-radius: 5px;
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
.history-role {
|
||||
font-weight: bold;
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
.history-content {
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.error-message {
|
||||
background-color: #ff6b6b;
|
||||
color: #fff;
|
||||
padding: 10px;
|
||||
border-radius: 5px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.retrying {
|
||||
background-color: #feca57;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
#clear-history-button {
|
||||
background-color: #e74c3c;
|
||||
color: white;
|
||||
border: none;
|
||||
padding: 10px;
|
||||
margin-bottom: 10px;
|
||||
cursor: pointer;
|
||||
font-family: 'Noto Sans Mono', monospace;
|
||||
font-size: 14px;
|
||||
border-radius: 5px;
|
||||
}
|
||||
|
||||
#clear-history-button:hover {
|
||||
background-color: #c0392b;
|
||||
}
|
||||
|
||||
#chat-tabs {
|
||||
display: flex;
|
||||
background-color: #222;
|
||||
padding: 10px 10px 0 10px;
|
||||
}
|
||||
|
||||
.chat-tab {
|
||||
background-color: #444;
|
||||
color: #fff;
|
||||
border: none;
|
||||
padding: 10px 20px;
|
||||
margin-right: 5px;
|
||||
cursor: pointer;
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
}
|
||||
|
||||
.chat-tab.active {
|
||||
background-color: #666;
|
||||
}
|
||||
|
||||
#new-chat-button {
|
||||
background-color: #27ae60;
|
||||
color: #fff;
|
||||
border: none;
|
||||
padding: 10px 20px;
|
||||
cursor: pointer;
|
||||
border-top-left-radius: 5px;
|
||||
border-top-right-radius: 5px;
|
||||
}
|
||||
|
||||
.close-tab {
|
||||
margin-left: 10px;
|
||||
color: #ff6b6b;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.thinking-section {
|
||||
margin-bottom: 20px;
|
||||
border-left: 2px solid #444;
|
||||
padding-left: 10px;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>DWS Intelligence</h1>
|
||||
<div id="main-container">
|
||||
<div id="chat-area">
|
||||
<div id="chat-tabs"></div>
|
||||
<div id="chat-container"></div>
|
||||
<div id="input-container" class="pdp-panel">
|
||||
<div class="pdp-label">INPUT:</div>
|
||||
<textarea id="user-input" placeholder="Type your message here..." rows="3"></textarea>
|
||||
<button id="send-button">Send</button>
|
||||
<button id="send-button">EXECUTE</button>
|
||||
</div>
|
||||
</div>
|
||||
<button id="sidebar-toggle">Toggle Charts</button>
|
||||
<div id="sidebar" class="collapsed">
|
||||
<div class="graph-container">
|
||||
<div class="graph-title">CPU Load</div>
|
||||
<canvas id="cpuChart"></canvas>
|
||||
</div>
|
||||
<div class="graph-container">
|
||||
<div class="graph-title">Memory Usage</div>
|
||||
<canvas id="memoryChart"></canvas>
|
||||
</div>
|
||||
<div class="graph-container">
|
||||
<div class="graph-title">Disk I/O</div>
|
||||
<canvas id="diskChart"></canvas>
|
||||
</div>
|
||||
<div class="graph-container">
|
||||
<div class="graph-title">GPU Load</div>
|
||||
<canvas id="gpuChart"></canvas>
|
||||
</div>
|
||||
<div class="graph-container">
|
||||
<div class="graph-title">GPU Memory</div>
|
||||
<canvas id="gpuMemoryChart"></canvas>
|
||||
</div>
|
||||
|
||||
<!-- Add this new section for conversation history -->
|
||||
<div class="conversation-history-container">
|
||||
<div class="graph-title">Conversation History</div>
|
||||
<div id="conversation-history"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const socket = io();
|
||||
const chatContainer = document.getElementById('chat-container');
|
||||
const userInput = document.getElementById('user-input');
|
||||
const sendButton = document.getElementById('send-button');
|
||||
const chatTabs = document.getElementById('chat-tabs');
|
||||
|
||||
let thinkingElement = null;
|
||||
let thinkingDetails = null;
|
||||
let thinkingStartTime = null;
|
||||
let currentChatId = null;
|
||||
let chats = {};
|
||||
|
||||
function createNewChat() {
|
||||
const chatId = Date.now().toString();
|
||||
chats[chatId] = {
|
||||
messages: [],
|
||||
thinkingSections: []
|
||||
};
|
||||
addChatTab(chatId);
|
||||
switchToChat(chatId);
|
||||
saveChats();
|
||||
}
|
||||
|
||||
function addChatTab(chatId) {
|
||||
const tab = document.createElement('button');
|
||||
tab.classList.add('chat-tab');
|
||||
tab.textContent = `Chat ${Object.keys(chats).length}`;
|
||||
tab.onclick = () => switchToChat(chatId);
|
||||
|
||||
const closeButton = document.createElement('span');
|
||||
closeButton.classList.add('close-tab');
|
||||
closeButton.textContent = '×';
|
||||
closeButton.onclick = (e) => {
|
||||
e.stopPropagation();
|
||||
closeChat(chatId);
|
||||
};
|
||||
|
||||
tab.appendChild(closeButton);
|
||||
chatTabs.insertBefore(tab, chatTabs.lastElementChild);
|
||||
}
|
||||
|
||||
function switchToChat(chatId) {
|
||||
currentChatId = chatId;
|
||||
document.querySelectorAll('.chat-tab').forEach(tab => tab.classList.remove('active'));
|
||||
document.querySelector(`.chat-tab:nth-child(${Object.keys(chats).indexOf(chatId) + 1})`).classList.add('active');
|
||||
renderChat(chatId);
|
||||
}
|
||||
|
||||
function closeChat(chatId) {
|
||||
delete chats[chatId];
|
||||
saveChats();
|
||||
const tabToRemove = Array.from(chatTabs.children).find(tab => tab.textContent.includes(`Chat ${Object.keys(chats).indexOf(chatId) + 1}`));
|
||||
if (tabToRemove) {
|
||||
chatTabs.removeChild(tabToRemove);
|
||||
}
|
||||
if (currentChatId === chatId) {
|
||||
const remainingChatIds = Object.keys(chats);
|
||||
if (remainingChatIds.length > 0) {
|
||||
switchToChat(remainingChatIds[0]);
|
||||
} else {
|
||||
createNewChat();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function renderChat(chatId) {
|
||||
chatContainer.innerHTML = '';
|
||||
const chat = chats[chatId];
|
||||
chat.messages.forEach(message => addMessage(message.content, message.isUser));
|
||||
chat.thinkingSections.forEach(section => {
|
||||
const thinkingSection = createThinkingSection();
|
||||
section.thoughts.forEach(thought => addThought(thought.type, thought.content, thought.details, thinkingSection));
|
||||
});
|
||||
}
|
||||
|
||||
function createThinkingSection() {
|
||||
const section = document.createElement('div');
|
||||
section.classList.add('thinking-section');
|
||||
chatContainer.appendChild(section);
|
||||
return section;
|
||||
}
|
||||
|
||||
function addMessage(message, isUser) {
|
||||
const messageElement = document.createElement('div');
|
||||
@ -97,53 +476,40 @@
|
||||
messageElement.innerHTML = isUser ? message : marked.parse(message);
|
||||
chatContainer.appendChild(messageElement);
|
||||
chatContainer.scrollTop = chatContainer.scrollHeight;
|
||||
|
||||
if (currentChatId) {
|
||||
chats[currentChatId].messages.push({ content: message, isUser: isUser });
|
||||
saveChats();
|
||||
}
|
||||
}
|
||||
|
||||
function startThinking() {
|
||||
thinkingElement = document.createElement('div');
|
||||
thinkingElement.classList.add('thought-summary', 'collapsible');
|
||||
thinkingElement.textContent = 'Thinking...';
|
||||
thinkingElement.onclick = toggleThinkingDetails;
|
||||
|
||||
thinkingDetails = document.createElement('div');
|
||||
thinkingDetails.classList.add('thought-details');
|
||||
|
||||
chatContainer.appendChild(thinkingElement);
|
||||
chatContainer.appendChild(thinkingDetails);
|
||||
|
||||
thinkingStartTime = Date.now();
|
||||
chatContainer.scrollTop = chatContainer.scrollHeight;
|
||||
}
|
||||
|
||||
function addThought(step, content) {
|
||||
if (thinkingDetails) {
|
||||
function addThought(type, content, details = '', thinkingSection) {
|
||||
const stepElement = document.createElement('div');
|
||||
stepElement.classList.add('thought-summary', 'collapsible');
|
||||
stepElement.textContent = step;
|
||||
stepElement.classList.add('thought-summary', 'collapsible', type);
|
||||
stepElement.textContent = type.charAt(0).toUpperCase() + type.slice(1).replace('_', ' ') + ':';
|
||||
stepElement.onclick = toggleStepDetails;
|
||||
|
||||
const stepDetails = document.createElement('div');
|
||||
stepDetails.classList.add('thought-details');
|
||||
stepDetails.innerHTML = content;
|
||||
|
||||
thinkingDetails.appendChild(stepElement);
|
||||
thinkingDetails.appendChild(stepDetails);
|
||||
if (type === 'error') {
|
||||
stepElement.classList.add('error-message');
|
||||
if (content.includes('retrying')) {
|
||||
stepElement.classList.add('retrying');
|
||||
}
|
||||
stepDetails.innerHTML = marked.parse(content + '\n\nDetails:\n```\n' + details + '\n```');
|
||||
} else {
|
||||
stepDetails.innerHTML = marked.parse(content);
|
||||
}
|
||||
|
||||
thinkingSection.appendChild(stepElement);
|
||||
thinkingSection.appendChild(stepDetails);
|
||||
chatContainer.scrollTop = chatContainer.scrollHeight;
|
||||
}
|
||||
}
|
||||
|
||||
function endThinking(thinkingTime) {
|
||||
if (thinkingElement) {
|
||||
thinkingElement.textContent = `Thinking... (${thinkingTime}s)`;
|
||||
thinkingStartTime = null;
|
||||
}
|
||||
}
|
||||
|
||||
function toggleThinkingDetails() {
|
||||
this.classList.toggle('open');
|
||||
const details = this.nextElementSibling;
|
||||
if (details) {
|
||||
details.style.display = details.style.display === 'none' ? 'block' : 'none';
|
||||
if (currentChatId) {
|
||||
const currentThinkingSection = chats[currentChatId].thinkingSections[chats[currentChatId].thinkingSections.length - 1];
|
||||
currentThinkingSection.thoughts.push({ type, content, details });
|
||||
saveChats();
|
||||
}
|
||||
}
|
||||
|
||||
@ -155,34 +521,71 @@
|
||||
}
|
||||
}
|
||||
|
||||
socket.on('thinking', (data) => {
|
||||
if (!thinkingElement) startThinking();
|
||||
addThought(data.step, 'Started');
|
||||
});
|
||||
function saveChats() {
|
||||
localStorage.setItem('chats', JSON.stringify(chats));
|
||||
}
|
||||
|
||||
socket.on('thought', (data) => {
|
||||
addThought('Result', data.content);
|
||||
});
|
||||
|
||||
socket.on('chat_response', (data) => {
|
||||
endThinking(data.thinking_time);
|
||||
addMessage(data.response, false);
|
||||
});
|
||||
|
||||
socket.on('error', (data) => {
|
||||
endThinking(data.thinking_time);
|
||||
addMessage(`Error: ${data.message}`, false);
|
||||
});
|
||||
function loadChats() {
|
||||
const storedChats = localStorage.getItem('chats');
|
||||
if (storedChats) {
|
||||
chats = JSON.parse(storedChats);
|
||||
Object.keys(chats).forEach(chatId => addChatTab(chatId));
|
||||
if (Object.keys(chats).length > 0) {
|
||||
switchToChat(Object.keys(chats)[0]);
|
||||
} else {
|
||||
createNewChat();
|
||||
}
|
||||
} else {
|
||||
createNewChat();
|
||||
}
|
||||
}
|
||||
|
||||
function sendMessage() {
|
||||
const message = userInput.value.trim();
|
||||
if (message) {
|
||||
if (message && currentChatId) {
|
||||
addMessage(message, true);
|
||||
socket.emit('chat_request', { message: message });
|
||||
chats[currentChatId].thinkingSections.push({ thoughts: [] });
|
||||
socket.emit('chat_request', {
|
||||
message: message,
|
||||
conversation_history: chats[currentChatId].messages.filter(m => !m.isUser).map(m => ({ role: 'assistant', content: m.content }))
|
||||
.concat(chats[currentChatId].messages.filter(m => m.isUser).map(m => ({ role: 'user', content: m.content })))
|
||||
});
|
||||
userInput.value = '';
|
||||
}
|
||||
}
|
||||
|
||||
socket.on('thinking', (data) => {
|
||||
if (currentChatId) {
|
||||
const newThinkingSection = createThinkingSection();
|
||||
chats[currentChatId].thinkingSections.push({ thoughts: [] });
|
||||
addThought(data.step, 'Started', '', newThinkingSection);
|
||||
}
|
||||
});
|
||||
|
||||
socket.on('thought', (data) => {
|
||||
if (currentChatId) {
|
||||
const currentThinkingSection = chatContainer.querySelector('.thinking-section:last-child');
|
||||
addThought(data.type, data.content, data.details, currentThinkingSection);
|
||||
}
|
||||
});
|
||||
|
||||
socket.on('chat_response', (data) => {
|
||||
if (currentChatId) {
|
||||
addMessage(data.response, false);
|
||||
}
|
||||
});
|
||||
|
||||
socket.on('error', (data) => {
|
||||
if (currentChatId) {
|
||||
const currentThinkingSection = chatContainer.querySelector('.thinking-section:last-child');
|
||||
if (data.type === 'retrying') {
|
||||
addThought('error', data.content, '', currentThinkingSection);
|
||||
} else {
|
||||
addThought('error', data.message, '', currentThinkingSection);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
sendButton.addEventListener('click', sendMessage);
|
||||
userInput.addEventListener('keypress', function(e) {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
@ -190,6 +593,224 @@
|
||||
sendMessage();
|
||||
}
|
||||
});
|
||||
|
||||
// Add new chat button
|
||||
const newChatButton = document.createElement('button');
|
||||
newChatButton.id = 'new-chat-button';
|
||||
newChatButton.textContent = '+ New Chat';
|
||||
newChatButton.onclick = createNewChat;
|
||||
chatTabs.appendChild(newChatButton);
|
||||
|
||||
// Load chats when the page loads
|
||||
loadChats();
|
||||
|
||||
const chartOptions = {
|
||||
type: 'line',
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
animation: false,
|
||||
elements: {
|
||||
line: {
|
||||
tension: 0
|
||||
},
|
||||
point: {
|
||||
radius: 0
|
||||
}
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
type: 'time',
|
||||
time: {
|
||||
unit: 'second',
|
||||
displayFormats: {
|
||||
second: 'HH:mm:ss'
|
||||
}
|
||||
},
|
||||
ticks: {
|
||||
display: false
|
||||
}
|
||||
},
|
||||
y: {
|
||||
beginAtZero: true,
|
||||
max: 100,
|
||||
ticks: {
|
||||
callback: function(value) {
|
||||
return value + '%';
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
plugins: {
|
||||
legend: {
|
||||
display: false
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// CPU load over time, as a percentage (shared chartOptions y axis).
const cpuChart = new Chart(document.getElementById('cpuChart').getContext('2d'), {
    ...chartOptions,
    data: {
        datasets: [{
            label: 'CPU Load',
            data: [],
            borderColor: 'rgb(75, 192, 192)',
            fill: false
        }]
    }
});

// Memory usage over time, as a percentage.
const memoryChart = new Chart(document.getElementById('memoryChart').getContext('2d'), {
    ...chartOptions,
    data: {
        datasets: [{
            label: 'Memory Usage',
            data: [],
            borderColor: 'rgb(255, 159, 64)',
            fill: false
        }]
    }
});

// Disk read/write rates. Overrides the shared y axis: values are raw
// bytes/sec rendered as MB/s, with no fixed 100 maximum.
const diskChart = new Chart(document.getElementById('diskChart').getContext('2d'), {
    ...chartOptions,
    options: {
        ...chartOptions.options,
        scales: {
            ...chartOptions.options.scales,
            y: {
                beginAtZero: true,
                ticks: {
                    callback: function(value) {
                        return (value / 1024 / 1024).toFixed(2) + ' MB/s';
                    }
                }
            }
        }
    },
    data: {
        datasets: [{
            label: 'Disk Read',
            data: [],
            borderColor: 'rgb(54, 162, 235)',
            fill: false
        },
        {
            label: 'Disk Write',
            data: [],
            borderColor: 'rgb(255, 99, 132)',
            fill: false
        }]
    }
});

// GPU load over time, as a percentage.
const gpuChart = new Chart(document.getElementById('gpuChart').getContext('2d'), {
    ...chartOptions,
    data: {
        datasets: [{
            label: 'GPU Load',
            data: [],
            borderColor: 'rgb(153, 102, 255)',
            fill: false
        }]
    }
});

// GPU memory usage over time, as a percentage.
const gpuMemoryChart = new Chart(document.getElementById('gpuMemoryChart').getContext('2d'), {
    ...chartOptions,
    data: {
        datasets: [{
            label: 'GPU Memory',
            data: [],
            borderColor: 'rgb(255, 206, 86)',
            fill: false
        }]
    }
});
|
||||
|
||||
// Append one sample of system metrics to every resource chart, keeping a
// rolling 30-second window of points per dataset.
function updateCharts(data) {
    // Charts are not visible while the sidebar is collapsed; skip the work.
    if (sidebar.classList.contains('collapsed')) return;

    const now = Date.now();
    const cutoff = now - 30000;

    // Push the new sample into one dataset and drop points older than the window.
    const pushSample = (dataset, value) => {
        dataset.data.push({x: now, y: value});
        dataset.data = dataset.data.filter(point => point.x > cutoff);
    };

    const singleSeriesCharts = [
        [cpuChart, data.cpu_load],
        [memoryChart, data.memory_usage],
        [gpuChart, data.gpu_load],
        [gpuMemoryChart, data.gpu_memory],
    ];
    for (const [chart, value] of singleSeriesCharts) {
        pushSample(chart.data.datasets[0], value);
        chart.update('none');
    }

    // The disk chart carries two datasets: read rate and write rate.
    pushSample(diskChart.data.datasets[0], data.disk_read_rate);
    pushSample(diskChart.data.datasets[1], data.disk_write_rate);
    diskChart.update('none');
}
|
||||
|
||||
// Listen for system resource updates
|
||||
socket.on('system_resources', (data) => {
|
||||
updateCharts(data);
|
||||
});
|
||||
|
||||
const sidebar = document.getElementById('sidebar');
|
||||
const sidebarToggle = document.getElementById('sidebar-toggle');
|
||||
|
||||
sidebarToggle.addEventListener('click', () => {
|
||||
sidebar.classList.toggle('collapsed');
|
||||
});
|
||||
|
||||
// Collapse the sidebar automatically on narrow (mobile-width) viewports,
// and re-open it when the window grows past the breakpoint.
function checkWindowSize() {
    const isNarrow = window.innerWidth <= 768;
    sidebar.classList.toggle('collapsed', isNarrow);
}
|
||||
|
||||
window.addEventListener('resize', checkWindowSize);
|
||||
checkWindowSize(); // Initial check
|
||||
|
||||
// Add this new function to update the conversation history
|
||||
// Rebuild the conversation-history panel from a list of {role, content} items.
function updateConversationHistory(history) {
    const container = document.getElementById('conversation-history');
    container.innerHTML = '';

    for (const item of history) {
        const roleEl = document.createElement('div');
        roleEl.classList.add('history-role');
        roleEl.textContent = item.role.charAt(0).toUpperCase() + item.role.slice(1);

        const contentEl = document.createElement('pre');
        contentEl.classList.add('history-content');
        // NOTE(review): assumes hljs.highlightAuto escapes HTML in .value
        // before it is assigned to innerHTML — verify against hljs docs.
        contentEl.innerHTML = hljs.highlightAuto(item.content).value;

        const card = document.createElement('div');
        card.classList.add('history-card');
        card.appendChild(roleEl);
        card.appendChild(contentEl);
        container.appendChild(card);
    }
}
|
||||
|
||||
// Add this new socket listener
|
||||
socket.on('conversation_history', (data) => {
|
||||
updateConversationHistory(data.history);
|
||||
});
|
||||
|
||||
// Add event listener for the clear history button
|
||||
clearHistoryButton.addEventListener('click', () => {
|
||||
if (confirm('Are you sure you want to clear the conversation history?')) {
|
||||
clearConversationHistory();
|
||||
}
|
||||
});
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
94
models.py
94
models.py
@ -3,30 +3,86 @@ import structlog
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
|
||||
class ModelManager:
|
||||
def __init__(self):
|
||||
self.model_capabilities = {
|
||||
"qwen2.5:7b": ["general_knowledge", "structured_output", "multilingual", "instruction_following", "structured_data"],
|
||||
"llama3.1:8b": ["general_knowledge", "reasoning", "tool_calling", "conversation", "multilingual", "instruction_following"],
|
||||
"qwen2.5-coder:7b": ["code_generation", "code_analysis", "instruction_following", "math_reasoning"],
|
||||
"llama3.2:3b": ["summarization", "instruction_following", "tool_calling", "multilingual"],
|
||||
"llava:7b": ["visual_reasoning", "visual_conversation", "visual_tool_calling", "vision", "ocr", "multimodal"],
|
||||
"ajindal/llama3.1-storm:8b": [
|
||||
"general_knowledge",
|
||||
"reasoning",
|
||||
"tool_calling",
|
||||
"conversation",
|
||||
"multilingual",
|
||||
"instruction_following",
|
||||
],
|
||||
"llama3.1:8b": [
|
||||
"general_knowledge",
|
||||
"reasoning",
|
||||
"tool_calling",
|
||||
"conversation",
|
||||
"multilingual",
|
||||
"instruction_following",
|
||||
],
|
||||
"qwen2.5:7b": [
|
||||
"general_knowledge",
|
||||
"reasoning",
|
||||
"tool_calling",
|
||||
"conversation",
|
||||
"multilingual",
|
||||
"instruction_following",
|
||||
],
|
||||
"llama3.2:3b": [
|
||||
"summarization",
|
||||
"instruction_following",
|
||||
"tool_calling",
|
||||
"multilingual",
|
||||
],
|
||||
"llava:7b": [
|
||||
"visual_reasoning",
|
||||
"visual_conversation",
|
||||
"visual_tool_calling",
|
||||
"vision",
|
||||
"ocr",
|
||||
"multimodal",
|
||||
],
|
||||
}
|
||||
logger.info("ModelManager initialized", model_capabilities=self.model_capabilities)
|
||||
logger.info(
|
||||
"ModelManager initialized", model_capabilities=self.model_capabilities
|
||||
)
|
||||
|
||||
def get_model_capabilities(self, model_name):
|
||||
capabilities = self.model_capabilities.get(model_name, [])
|
||||
logger.debug("Retrieved model capabilities", model=model_name, capabilities=capabilities)
|
||||
logger.debug(
|
||||
"Retrieved model capabilities", model=model_name, capabilities=capabilities
|
||||
)
|
||||
return capabilities
|
||||
|
||||
def select_best_model(self, required_capability):
|
||||
suitable_models = [model for model, capabilities in self.model_capabilities.items() if required_capability in capabilities]
|
||||
selected_model = suitable_models[0] if suitable_models else list(self.model_capabilities.keys())[0]
|
||||
logger.info("Selected best model", required_capability=required_capability, selected_model=selected_model)
|
||||
suitable_models = [
|
||||
model
|
||||
for model, capabilities in self.model_capabilities.items()
|
||||
if required_capability in capabilities
|
||||
]
|
||||
selected_model = (
|
||||
suitable_models[0]
|
||||
if suitable_models
|
||||
else list(self.model_capabilities.keys())[0]
|
||||
)
|
||||
logger.info(
|
||||
"Selected best model",
|
||||
required_capability=required_capability,
|
||||
selected_model=selected_model,
|
||||
)
|
||||
return selected_model
|
||||
|
||||
def generate_text(self, model_name, prompt, max_length=100, system="You are a helpful assistant."):
|
||||
logger.debug("Generating text", model=model_name, prompt=prompt, max_length=max_length)
|
||||
def generate_text(
|
||||
self,
|
||||
model_name,
|
||||
prompt,
|
||||
max_length=100,
|
||||
system="You are a helpful assistant.",
|
||||
tools=[],
|
||||
):
|
||||
# Check if model exists
|
||||
try:
|
||||
ollama.pull(model_name)
|
||||
@ -38,8 +94,16 @@ class ModelManager:
|
||||
else:
|
||||
logger.exception("Error pulling model", model=model_name, error=str(e))
|
||||
raise e
|
||||
response = ollama.generate(model=model_name, prompt=prompt, system=system)
|
||||
logger.debug("Text generated", model=model_name, response=response['response'])
|
||||
return response['response']
|
||||
|
||||
response = ollama.generate(
|
||||
model=model_name,
|
||||
prompt=prompt,
|
||||
system=system,
|
||||
tools=tools,
|
||||
max_tokens=max_length,
|
||||
)
|
||||
logger.debug("Text generated", model=model_name, response=response["response"])
|
||||
return response["response"]
|
||||
|
||||
|
||||
model_manager = ModelManager()
|
@ -4,11 +4,19 @@ aiohttp==3.10.5
|
||||
aiosignal==1.3.1
|
||||
annotated-types==0.7.0
|
||||
anyio==4.6.0
|
||||
art==6.3
|
||||
attrs==24.2.0
|
||||
beautifulsoup4==4.12.3
|
||||
bidict==0.23.1
|
||||
black==24.8.0
|
||||
blinker==1.8.2
|
||||
bs4==0.0.2
|
||||
certifi==2024.7.4
|
||||
chardet==5.2.0
|
||||
charset-normalizer==3.3.2
|
||||
click==8.1.7
|
||||
cloudpickle==3.0.0
|
||||
cssselect==1.2.0
|
||||
datasets==3.0.0
|
||||
dill==0.3.8
|
||||
diskcache==5.6.3
|
||||
@ -17,9 +25,13 @@ duckduckgo_search==6.2.6
|
||||
einops==0.8.0
|
||||
fastapi==0.115.0
|
||||
filelock==3.15.4
|
||||
Flask==3.0.3
|
||||
flask-openapi3==3.1.3
|
||||
Flask-SocketIO==5.3.7
|
||||
frozenlist==1.4.1
|
||||
fsspec==2024.6.1
|
||||
gguf==0.9.1
|
||||
GPUtil==1.4.0
|
||||
h11==0.14.0
|
||||
httpcore==1.0.5
|
||||
httptools==0.6.1
|
||||
@ -29,6 +41,8 @@ idna==3.7
|
||||
importlib_metadata==8.5.0
|
||||
inquirerpy==0.3.4
|
||||
interegular==0.3.3
|
||||
isort==5.13.2
|
||||
itsdangerous==2.2.0
|
||||
Jinja2==3.1.4
|
||||
jiter==0.5.0
|
||||
jsonschema==4.23.0
|
||||
@ -36,6 +50,9 @@ jsonschema-specifications==2023.12.1
|
||||
lark==1.2.2
|
||||
llvmlite==0.43.0
|
||||
lm-format-enforcer==0.10.6
|
||||
lxml==5.3.0
|
||||
lxml_html_clean==0.2.2
|
||||
markdownify==0.13.1
|
||||
MarkupSafe==2.1.5
|
||||
mistral_common==1.4.3
|
||||
mpmath==1.3.0
|
||||
@ -43,6 +60,7 @@ msgpack==1.1.0
|
||||
msgspec==0.18.6
|
||||
multidict==6.1.0
|
||||
multiprocess==0.70.16
|
||||
mypy-extensions==1.0.0
|
||||
nest-asyncio==1.6.0
|
||||
networkx==3.3
|
||||
numba==0.60.0
|
||||
@ -60,13 +78,16 @@ nvidia-ml-py==12.560.30
|
||||
nvidia-nccl-cu12==2.20.5
|
||||
nvidia-nvjitlink-cu12==12.6.20
|
||||
nvidia-nvtx-cu12==12.1.105
|
||||
ollama==0.3.3
|
||||
openai==1.47.1
|
||||
outlines==0.0.46
|
||||
packaging==24.1
|
||||
pandas==2.2.3
|
||||
partial-json-parser==0.2.1.1.post4
|
||||
pathspec==0.12.1
|
||||
pfzy==0.3.4
|
||||
pillow==10.4.0
|
||||
platformdirs==4.3.6
|
||||
primp==0.5.5
|
||||
prometheus-fastapi-instrumentator==7.0.0
|
||||
prometheus_client==0.21.0
|
||||
@ -81,10 +102,13 @@ pydantic==2.9.2
|
||||
pydantic_core==2.23.4
|
||||
python-dateutil==2.9.0.post0
|
||||
python-dotenv==1.0.1
|
||||
python-engineio==4.9.1
|
||||
python-socketio==5.11.4
|
||||
pytz==2024.2
|
||||
PyYAML==6.0.2
|
||||
pyzmq==26.2.0
|
||||
ray==2.36.1
|
||||
readability-lxml==0.8.1
|
||||
referencing==0.35.1
|
||||
regex==2024.7.24
|
||||
requests==2.32.3
|
||||
@ -92,9 +116,12 @@ rpds-py==0.20.0
|
||||
safetensors==0.4.4
|
||||
sentencepiece==0.2.0
|
||||
setuptools==72.1.0
|
||||
simple-websocket==1.0.0
|
||||
six==1.16.0
|
||||
sniffio==1.3.1
|
||||
soupsieve==2.6
|
||||
starlette==0.38.6
|
||||
structlog==24.4.0
|
||||
sympy==1.13.2
|
||||
tiktoken==0.7.0
|
||||
tokenizers==0.19.1
|
||||
@ -113,6 +140,8 @@ vllm-flash-attn==2.6.1
|
||||
watchfiles==0.24.0
|
||||
wcwidth==0.2.13
|
||||
websockets==13.1
|
||||
Werkzeug==3.0.4
|
||||
wsproto==1.2.0
|
||||
xformers==0.0.27.post2
|
||||
xxhash==3.5.0
|
||||
yarl==1.12.0
|
||||
|
16
schema.sql
Normal file
16
schema.sql
Normal file
@ -0,0 +1,16 @@
|
||||
-- API keys: one unique key per username.
CREATE TABLE IF NOT EXISTS Keys (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    username TEXT NOT NULL UNIQUE,
    api_key TEXT NOT NULL UNIQUE
);

-- Query log: one row per submitted query, optionally tied to an API key.
CREATE TABLE IF NOT EXISTS Queries (
    id TEXT PRIMARY KEY,
    ip TEXT NOT NULL,
    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
    query TEXT NOT NULL,
    api_key_id INTEGER,
    status TEXT NOT NULL,
    conversation_history TEXT, -- serialized conversation (presumably JSON; verify against the writer)
    FOREIGN KEY (api_key_id) REFERENCES Keys (id)
);
|
369
tools.py
Normal file
369
tools.py
Normal file
@ -0,0 +1,369 @@
|
||||
import subprocess
|
||||
import tempfile
|
||||
import time
|
||||
import json
|
||||
import requests
|
||||
from markdownify import markdownify as md
|
||||
from readability.readability import Document
|
||||
import duckduckgo_search
|
||||
import datetime
|
||||
import random
|
||||
import math
|
||||
import re
|
||||
import base64
|
||||
from io import BytesIO
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import ollama
|
||||
import os
|
||||
|
||||
class Tool:
    """Base class for a callable tool exposed to the LLM.

    Subclasses override ``execute``; ``arguments`` is a JSON-schema dict
    describing the expected argument object.
    """

    def __init__(self, name: str, description: str, arguments: dict, returns: str):
        # Metadata read by ToolManager when building prompts and ollama specs.
        self.name = name
        self.returns = returns
        self.arguments = arguments
        self.description = description

    def execute(self, arguments: dict) -> str:
        """Run the tool. The base implementation is a no-op returning None."""
        pass
|
||||
|
||||
|
||||
class ToolManager:
    """Ordered registry of Tool instances.

    Provides lookups by name plus rendering helpers for the LLM prompt
    and for ollama's tool-spec format.
    """

    def __init__(self):
        # Registration order is preserved in all rendered outputs.
        self.tools = []

    def add_tool(self, tool: Tool):
        """Register one tool instance."""
        self.tools.append(tool)

    def get_tool(self, name: str) -> Tool:
        """Return the first registered tool with the given name, or None."""
        return next((t for t in self.tools if t.name == name), None)

    def get_tools_and_descriptions_for_prompt(self):
        """Render one 'name: description' line per tool, newline-joined."""
        lines = [f"{t.name}: {t.description}" for t in self.tools]
        return "\n".join(lines)

    def get_tools_for_ollama_dict(self):
        """Render every tool in ollama's function tool-spec shape."""
        specs = []
        for t in self.tools:
            specs.append(
                {
                    "type": "function",
                    "function": {
                        "name": t.name,
                        "description": t.description,
                        "parameters": t.arguments,
                    },
                }
            )
        return specs
|
||||
|
||||
|
||||
class DefaultToolManager(ToolManager):
    """ToolManager pre-populated with the standard tool set.

    Registration order below is preserved in prompts and ollama specs.
    """

    def __init__(self):
        super().__init__()
        default_tool_classes = (
            SearchTool,
            GetReadablePageContentsTool,
            CalculatorTool,
            PythonCodeTool,
            DateTimeTool,
            RandomNumberTool,
            RegexTool,
            Base64Tool,
            SimpleChartTool,
            LLAVAImageAnalysisTool,
        )
        for tool_cls in default_tool_classes:
            self.add_tool(tool_cls())
|
||||
|
||||
|
||||
class SearchTool(Tool):
    """Web search via DuckDuckGo; returns the top five hits as text."""

    def __init__(self):
        arg_schema = {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "The search query"}
            },
        }
        super().__init__(
            "search_web",
            "Search the internet for information",
            arg_schema,
            "results:list[string]",
        )

    def execute(self, arg: dict) -> str:
        """Return 'title / body / url' entries separated by blank lines."""
        try:
            hits = duckduckgo_search.DDGS().text(arg["query"], max_results=5)
            return "\n\n".join(f"{h['title']}\n{h['body']}\n{h['href']}" for h in hits)
        except Exception as e:
            return f"Error searching the web: {str(e)}"
|
||||
|
||||
|
||||
def get_readable_page_contents(url: str) -> str:
    """Fetch ``url``, strip boilerplate with readability, return markdown.

    Any failure (network, HTTP status, parsing) is reported as an error
    string rather than raised.
    """
    try:
        resp = requests.get(url)
        resp.raise_for_status()
        readable_html = Document(resp.content).summary()
        return md(readable_html)
    except Exception as e:
        return f"Error fetching readable content: {str(e)}"
|
||||
|
||||
|
||||
class GetReadablePageContentsTool(Tool):
    """Tool wrapper around get_readable_page_contents."""

    def __init__(self):
        arg_schema = {
            "type": "object",
            "properties": {
                "url": {"type": "string", "description": "The url of the web page"}
            },
        }
        super().__init__(
            "get_readable_page_contents",
            "Get the contents of a web page in a readable format",
            arg_schema,
            "contents:string",
        )

    def execute(self, arg: dict) -> str:
        """Delegate to the module-level fetch/extract helper."""
        return get_readable_page_contents(arg["url"])
|
||||
|
||||
|
||||
class CalculatorTool(Tool):
    """Evaluate a Python expression and return its result as a string."""

    def __init__(self):
        super().__init__(
            "calculator",
            "Perform a calculation using python's eval function",
            {
                "type": "object",
                "properties": {
                    "expression": {
                        "type": "string",
                        "description": "The mathematical expression to evaluate, should be a python mathematical expression",
                    }
                },
            },
            "result:string",
        )

    def execute(self, arg: dict) -> str:
        # SECURITY NOTE(review): eval() on model-supplied input executes
        # arbitrary Python, not just arithmetic — consider ast.literal_eval
        # or a restricted evaluator before exposing this externally.
        try:
            return str(eval(arg["expression"]))
        except Exception as e:
            # Any evaluation failure is reported as a string, matching the
            # error-return convention of the other tools.
            return f"Error executing code: {str(e)}"
|
||||
|
||||
|
||||
class PythonCodeTool(Tool):
    """Run model-supplied Python code in a subprocess and report its output.

    The code is written to a temporary ``.py`` file and executed with a
    10-second timeout; stdout, stderr, return code and elapsed time are
    folded into the result string. Failures return error strings.
    """

    def __init__(self):
        super().__init__(
            "python_code",
            "Execute python code using a temporary file and a subprocess. You must print results to stdout.",
            {
                "type": "object",
                "properties": {
                    "code": {
                        "type": "string",
                        "description": "The python code to execute, can be multiline",
                    }
                },
            },
            "result:string",
        )

    def execute(self, arg: dict) -> str:
        """Execute ``arg['code']``; return a report string or an error message."""
        temp_path = None
        try:
            with tempfile.NamedTemporaryFile(
                suffix=".py", mode="w", delete=False
            ) as temp_file:
                temp_file.write(arg["code"])
                temp_file.flush()
                temp_path = temp_file.name

            start_time = time.time()
            process = subprocess.Popen(
                ["python", temp_path],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
            )
            stdout, stderr = process.communicate(timeout=10)  # 10 second timeout
            execution_time = time.time() - start_time

            result = {
                "stdout": stdout,
                "stderr": stderr,
                "return_value": process.returncode,
                "execution_time": execution_time,
            }

        except subprocess.TimeoutExpired:
            process.kill()
            # BUG FIX: reap the killed child so it doesn't linger as a zombie.
            process.communicate()
            return "Error: Code execution timed out after 10 seconds"
        except Exception as e:
            return f"Error executing code: {str(e)}"
        finally:
            # BUG FIX: the temp file was created with delete=False and never
            # removed, leaking one file per call.
            if temp_path is not None:
                try:
                    os.unlink(temp_path)
                except OSError:
                    pass

        return "\n".join([f"{k}:\n{v}" for k, v in result.items()])
|
||||
|
||||
|
||||
class DateTimeTool(Tool):
    """Report the current local date/time as 'YYYY-MM-DD HH:MM:SS'."""

    def __init__(self):
        super().__init__(
            "get_current_datetime",
            "Get the current date and time",
            {"type": "object", "properties": {}},
            "datetime:string",
        )

    def execute(self, arg: dict) -> str:
        """Format the current local time; takes no arguments."""
        now = datetime.datetime.now()
        return now.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
class RandomNumberTool(Tool):
    """Uniform random float between min and max, returned as a string."""

    def __init__(self):
        arg_schema = {
            "type": "object",
            "properties": {
                "min": {"type": "number", "description": "The minimum value"},
                "max": {"type": "number", "description": "The maximum value"},
            },
        }
        super().__init__(
            "generate_random_number",
            "Generate a random number within a given range",
            arg_schema,
            "random_number:number",
        )

    def execute(self, arg: dict) -> str:
        """Draw from random.uniform(min, max) and stringify the result."""
        low, high = arg["min"], arg["max"]
        return str(random.uniform(low, high))
|
||||
|
||||
|
||||
class RegexTool(Tool):
    """Find all regex matches in a text; returns them as a JSON array."""

    def __init__(self):
        arg_schema = {
            "type": "object",
            "properties": {
                "text": {"type": "string", "description": "The text to search in"},
                "pattern": {"type": "string", "description": "The regex pattern to match"},
            },
        }
        super().__init__(
            "regex_match",
            "Perform a regex match on a given text",
            arg_schema,
            "matches:list[string]",
        )

    def execute(self, arg: dict) -> str:
        """Run re.findall and JSON-encode the match list."""
        found = re.findall(arg["pattern"], arg["text"])
        return json.dumps(found)
|
||||
|
||||
|
||||
class Base64Tool(Tool):
    """Base64-encode or -decode a UTF-8 text string."""

    def __init__(self):
        arg_schema = {
            "type": "object",
            "properties": {
                "action": {"type": "string", "enum": ["encode", "decode"], "description": "Whether to encode or decode"},
                "text": {"type": "string", "description": "The text to encode or decode"},
            },
        }
        super().__init__(
            "base64_encode_decode",
            "Encode or decode a string using Base64",
            arg_schema,
            "result:string",
        )

    def execute(self, arg: dict) -> str:
        """Dispatch on 'action'; unknown actions yield an error string."""
        action = arg["action"]
        text = arg["text"]
        if action == "encode":
            return base64.b64encode(text.encode()).decode()
        if action == "decode":
            return base64.b64decode(text.encode()).decode()
        return "Invalid action. Use 'encode' or 'decode'."
|
||||
|
||||
|
||||
class SimpleChartTool(Tool):
    """Render a basic bar chart with PIL and return it as base64 PNG."""

    def __init__(self):
        super().__init__(
            "generate_simple_chart",
            "Generate a simple bar chart image",
            {
                "type": "object",
                "properties": {
                    "data": {"type": "array", "items": {"type": "number"}, "description": "List of numerical values for the chart"},
                    "labels": {"type": "array", "items": {"type": "string"}, "description": "Labels for each bar"}
                }
            },
            "image_base64:string"
        )

    def execute(self, arg: dict) -> str:
        """Draw one blue bar per value with a label underneath; return base64 PNG."""
        data = arg["data"]
        labels = arg["labels"]

        # BUG FIX: an empty list crashed max(); a non-positive maximum caused
        # a ZeroDivisionError. Guard both, matching the tools' error-string style.
        if not data:
            return "Error generating chart: 'data' must be a non-empty list of numbers"
        max_value = max(data)
        scale = max_value if max_value > 0 else 1

        # Create a simple bar chart
        width, height = 400, 300
        img = Image.new('RGB', (width, height), color='white')
        draw = ImageDraw.Draw(img)

        # Draw bars; heights are normalized against the largest value.
        bar_width = width // (len(data) + 1)
        for i, value in enumerate(data):
            bar_height = (value / scale) * (height - 50)
            left = (i + 1) * bar_width
            draw.rectangle([left, height - bar_height, left + bar_width, height], fill='blue')

        # Add labels centered under each bar.
        font = ImageFont.load_default()
        for i, label in enumerate(labels):
            left = (i + 1) * bar_width + bar_width // 2
            draw.text((left, height - 20), label, fill='black', anchor='ms', font=font)

        # Convert to base64 for transport in the tool result.
        buffered = BytesIO()
        img.save(buffered, format="PNG")
        return base64.b64encode(buffered.getvalue()).decode()
|
||||
|
||||
|
||||
class LLAVAImageAnalysisTool(Tool):
    """Answer a question about a base64-encoded image using the LLAVA model."""

    def __init__(self):
        super().__init__(
            "analyze_image",
            "Analyze an image using the LLAVA model",
            {
                "type": "object",
                "properties": {
                    "image_base64": {"type": "string", "description": "Base64 encoded image"},
                    "question": {"type": "string", "description": "Question about the image"}
                }
            },
            "analysis:string"
        )

    def execute(self, arg: dict) -> str:
        """Decode the image, ask llava:7b the question, return its answer."""
        temp_file_path = None
        try:
            # Decode base64 image
            image_data = base64.b64decode(arg["image_base64"])
            image = Image.open(BytesIO(image_data))

            # ollama takes an on-disk path, so round-trip through a temp PNG.
            with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as temp_file:
                image.save(temp_file, format="PNG")
                temp_file_path = temp_file.name

            # Call LLAVA model
            response = ollama.chat(
                model="llava:7b",
                messages=[
                    {
                        "role": "user",
                        "content": arg["question"],
                        "images": [temp_file_path]
                    }
                ]
            )

            # NOTE(review): ollama.delete removes the model from local storage,
            # not just from memory — presumably intended to free resources
            # after a one-off vision call; confirm this is the desired effect.
            ollama.delete("llava:7b")

            return response['message']['content']
        except Exception as e:
            return f"Error analyzing image: {str(e)}"
        finally:
            # BUG FIX: the temp image leaked whenever ollama.chat raised,
            # because cleanup only ran on the success path.
            if temp_file_path is not None and os.path.exists(temp_file_path):
                os.remove(temp_file_path)
|
Reference in New Issue
Block a user