Added Application HolidayViewer
src/components/aibuddy/AIBuddyApp.py (Normal file, +39)
@@ -0,0 +1,39 @@
import logging

from fasthtml.fastapp import fast_app

from components.aibuddy.constants import Routes
from core.instance_manager import debug_session, InstanceManager

ai_buddy_app, rt = fast_app()

logger = logging.getLogger("AIBuddy")


@rt(Routes.Request)
def get(session, _id: str):
    logger.debug(f"Entering {Routes.Request} - GET with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.show_request_form()


@rt(Routes.Request)
async def post(session, _id: str, q: str):
    logger.debug(f"Entering {Routes.Request} - POST with args {debug_session(session)}, {_id=}, {q=}")
    instance = InstanceManager.get(session, _id)
    res = await instance.make_async_ai_request(q)
    return res


@rt(Routes.ResetRequest)
def post(session, _id: str):
    logger.debug(f"Entering {Routes.ResetRequest} with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.reset_ai_request()


@rt(Routes.LlmStatus)
def get(session, _id: str):
    logger.debug(f"Entering {Routes.LlmStatus} - GET with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.get_llm_status()
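Note: FastHTML dispatches these handlers by function name (`get` serves GET, `post` serves POST for the route passed to `@rt`), which is why the module can register two handlers per route. The commit does not show how `ai_buddy_app` is attached to the main application; given `ROUTE_ROOT = "/ai"` in constants.py below, a minimal wiring sketch, assuming the sub-app is mounted under `ROUTE_ROOT` (FastHTML apps subclass Starlette, so `mount()` is available):

```python
# Hypothetical wiring, not part of this commit: mount the sub-app under
# ROUTE_ROOT so that Routes.Request resolves to /ai/request.
from fasthtml.fastapp import fast_app

from components.aibuddy.AIBuddyApp import ai_buddy_app
from components.aibuddy.constants import ROUTE_ROOT

app, rt = fast_app()
app.mount(ROUTE_ROOT, ai_buddy_app)
```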
src/components/aibuddy/Readme.md (Normal file, +11)
@@ -0,0 +1,11 @@
# Ids

**Component ids**:

When the component is created with `AIBuddy(_id=my_id)`, the following element ids are derived:

| Name                        | Value          |
|-----------------------------|----------------|
| Status icon                 | `s_{self._id}` |
| Question (input) component  | `q_{self._id}` |
| Answer component            | `a_{self._id}` |
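A quick sketch of the convention (the id `buddy1` is hypothetical, and `session` comes from the surrounding app):

```python
buddy = AIBuddy(session, _id="buddy1")
# Derived element ids:
#   status icon:    s_buddy1
#   question input: q_buddy1
#   answer:         a_buddy1
```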
src/components/aibuddy/__init__.py (Normal file, +0)
src/components/aibuddy/assets/AIBuddy.js (Normal file, +52)
@@ -0,0 +1,52 @@
function bindAIBuddy(elementId) {
    console.debug("bindAIBuddy on element " + elementId);

    const aiBuddy = document.getElementById(elementId);

    if (!aiBuddy) {
        console.error(`AIBuddy with id "${elementId}" not found.`);
        return;
    }

    let lastShiftPress = 0;
    const doublePressDelay = 300;  // ms window for a double Shift press

    const makeAIRequest = () => {
        const targetID = "q_" + elementId;

        // Must match ROUTE_ROOT + Routes.Request ("/ai" + "/request") on the server side
        htmx.ajax('GET', '/ai/request', {
            target: `#${targetID}`,
            headers: {"Content-Type": "application/x-www-form-urlencoded"},
            swap: "outerHTML",
            values: {
                _id: elementId,
            }
        });

        // Listen for the htmx:afterSwap event to focus on the input
        document.addEventListener('htmx:afterSwap', function handler(event) {
            if (event.target.id === targetID) {
                event.target.focus();
            }

            // Remove this event listener after it's been triggered
            document.removeEventListener('htmx:afterSwap', handler);
        });
    };

    document.addEventListener('keydown', (event) => {
        // Ctrl+K opens the request input
        if (event.ctrlKey && event.key === 'k') {
            event.preventDefault();
            makeAIRequest();
        }

        // Pressing Shift twice within doublePressDelay also opens it
        if (event.key === 'Shift') {
            const currentTime = new Date().getTime();
            if (currentTime - lastShiftPress <= doublePressDelay) {
                event.preventDefault();
                makeAIRequest();
            }
            lastShiftPress = currentTime;
        }
    });
}
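`bindAIBuddy` is invoked from Python: `AIBuddy.__ft__` (later in this commit) emits `Script(f"bindAIBuddy('{self._id}')")`, so the keyboard shortcuts are registered as soon as the component is rendered.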
src/components/aibuddy/assets/__init__.py (Normal file, +0)
src/components/aibuddy/assets/icons.py (Normal file, +17)
@@ -0,0 +1,17 @@
from fastcore.basics import NotStr

# Fluent BrainCircuit20Regular
icon_brain_ok = NotStr("""<svg name="ai_ok" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
<g fill="none">
<path d="M6.13 2.793A3.91 3.91 0 0 1 8.5 2a1.757 1.757 0 0 1 1.5.78A1.757 1.757 0 0 1 11.5 2a3.91 3.91 0 0 1 2.37.793c.525.408.93.973 1.073 1.656c.328.025.628.161.88.366c.382.31.66.775.835 1.267c.274.765.348 1.74.064 2.57c.072.034.143.074.212.12c.275.183.484.445.638.754c.303.605.428 1.449.428 2.474c0 1.141-.435 1.907-.987 2.38a2.68 2.68 0 0 1-1.054.555c-.1.558-.38 1.204-.819 1.752C14.57 17.402 13.686 18 12.5 18c-.94 0-1.688-.52-2.174-1.03a4.252 4.252 0 0 1-.326-.385a4.245 4.245 0 0 1-.326.385C9.188 17.48 8.441 18 7.5 18c-1.186 0-2.069-.598-2.64-1.313a4.057 4.057 0 0 1-.819-1.752a2.68 2.68 0 0 1-1.054-.555C2.435 13.907 2 13.14 2 12c0-1.025.126-1.87.428-2.474c.154-.309.363-.57.638-.755a1.58 1.58 0 0 1 .212-.118c-.284-.832-.21-1.806.064-2.571c.175-.492.453-.957.835-1.267c.252-.205.552-.34.88-.366c.144-.683.549-1.248 1.074-1.656zM9.5 4.5V4.49l-.002-.05a2.744 2.744 0 0 0-.154-.764a1.222 1.222 0 0 0-.309-.49A.76.76 0 0 0 8.5 3a2.91 2.91 0 0 0-1.756.582C6.28 3.943 6 4.432 6 5a.5.5 0 0 1-.658.474c-.188-.062-.356-.027-.535.117c-.196.16-.387.444-.524.827c-.279.782-.25 1.729.133 2.305A.5.5 0 0 1 4.5 9h.75a2.25 2.25 0 0 1 2.25 2.25v.335a1.5 1.5 0 1 1-1 0v-.335c0-.69-.56-1.25-1.25-1.25H3.5a.499.499 0 0 1-.175-.032l-.003.006C3.124 10.369 3 11.025 3 12c0 .859.315 1.343.638 1.62c.347.298.732.38.862.38a.5.5 0 0 1 .5.5c0 .368.2 1.011.64 1.563c.429.535 1.046.937 1.86.937c.56 0 1.062-.313 1.45-.72c.191-.2.34-.407.437-.577a1.573 1.573 0 0 0 .113-.236V7.5H8.415a1.5 1.5 0 1 1 0-1H9.5v-2zm1 9.999v.967a1.575 1.575 0 0 0 .113.236c.098.17.246.377.436.577c.389.407.892.72 1.451.72c.814 0 1.431-.402 1.86-.937c.44-.552.64-1.195.64-1.563a.5.5 0 0 1 .5-.5c.13 0 .515-.082.862-.38c.323-.277.638-.761.638-1.62c0-.975-.125-1.63-.322-2.026a.923.923 0 0 0-.3-.37A.657.657 0 0 0 16 9.5a.5.5 0 0 1-.416-.777c.384-.576.412-1.523.133-2.305c-.137-.383-.328-.668-.524-.827c-.179-.144-.347-.18-.535-.117A.5.5 0 0 1 14 5c0-.568-.28-1.057-.745-1.418A2.91 2.91 0 0 0 11.5 3a.76.76 0 0 0-.535.186a1.22 1.22 0 0 0-.31.49a2.579 2.579 0 0 0-.155.814v9.01h.75c.69 0 1.25-.56 1.25-1.25v-1.835a1.5 1.5 0 1 1 1 0v1.835a2.25 2.25 0 0 1-2.25 2.25h-.75zM6.5 7a.5.5 0 1 0 1 0a.5.5 0 0 0-1 0zM13 9.5a.5.5 0 1 0 0-1a.5.5 0 0 0 0 1zm-6 3a.5.5 0 1 0 0 1a.5.5 0 0 0 0-1z" fill="currentColor">
</path>
</g>
</svg>""")

# Fluent Warning20Regular
icon_brain_warning = NotStr("""<svg name="ai_nok" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
<g fill="none">
<path d="M10 7a.5.5 0 0 1 .5.5v4a.5.5 0 0 1-1 0v-4A.5.5 0 0 1 10 7zm0 7.5a.75.75 0 1 0 0-1.5a.75.75 0 0 0 0 1.5zM8.686 2.852a1.5 1.5 0 0 1 2.628 0l6.56 11.925A1.5 1.5 0 0 1 16.558 17H3.44a1.5 1.5 0 0 1-1.314-2.223L8.686 2.852zm1.752.482a.5.5 0 0 0-.876 0L3.003 15.26a.5.5 0 0 0 .438.741H16.56a.5.5 0 0 0 .438-.74L10.438 3.333z" fill="currentColor">
</path>
</g>
</svg>""")
src/components/aibuddy/commands.py (Normal file, +32)
@@ -0,0 +1,32 @@
from components.BaseCommandManager import BaseCommandManager
from components.aibuddy.constants import ROUTE_ROOT, Routes


class AIBuddyCommandManager(BaseCommandManager):
    """Builds the htmx attribute dicts that AIBuddy splats into its components."""

    def __init__(self, owner):
        super().__init__(owner)

    def make_request(self):
        # POST the question; swap the answer component (#a_{id}) with the response
        return {
            "hx-post": f"{ROUTE_ROOT}{Routes.Request}",
            "hx-target": f"#a_{self._id}",
            "hx-swap": "outerHTML",
            "hx-vals": f'{{"_id": "{self._id}"}}',
        }

    def reset_request(self):
        # POST a reset; swap the question component (#q_{id}) with a fresh form
        return {
            "hx-post": f"{ROUTE_ROOT}{Routes.ResetRequest}",
            "hx-target": f"#q_{self._id}",
            "hx-swap": "outerHTML",
            "hx-vals": f'{{"_id": "{self._id}"}}',
        }

    def get_llm_status(self):
        # Poll the LLM status every 2 s; swap the status icon (#s_{id})
        return {
            "hx-get": f"{ROUTE_ROOT}{Routes.LlmStatus}",
            "hx-target": f"#s_{self._id}",
            "hx-trigger": "every 2s",
            "hx-swap": "outerHTML",
            "hx-vals": f'{{"_id": "{self._id}"}}',
        }
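These dicts are consumed by splatting them into FT components, as `AIBuddy._mk_input_ok` does later in this commit: `Input(name="q", **self.commands.make_request())` renders an input that POSTs its value to `/ai/request` and swaps the answer div `#a_{id}` with the response.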
src/components/aibuddy/components/AIBuddy.py (Normal file, +217)
@@ -0,0 +1,217 @@
import asyncio
import threading
from datetime import datetime

import httpx
from fasthtml.components import *
from fasthtml.xtend import Script

from ai.debug_lmm import DebugRequest, DebugRequestMetadata, DebugConversation
from ai.mcp_client import InProcessMCPClientCustomTools, InProcessMCPClientNativeTools
from assets.icons import icon_dismiss_regular
from components.BaseComponent import BaseComponent
from components.aibuddy.assets.icons import icon_brain_ok, icon_brain_warning
from components.aibuddy.commands import AIBuddyCommandManager
from components.aibuddy.constants import *
from components.aibuddy.settings import AI_BUDDY_SETTINGS_ENTRY, AIBuddySettings
from components_helpers import mk_ellipsis, mk_icon, mk_tooltip
from config import OLLAMA_HOST
from core.settings_management import GenericDbManager


class AIBuddy(BaseComponent):
    def __init__(self, session, _id: str = None, settings_manager=None, tabs_manager=None):
        super().__init__(session, _id)
        self.settings_manager = settings_manager
        self.db = GenericDbManager(session, settings_manager, AI_BUDDY_SETTINGS_ENTRY, AIBuddySettings)
        self.tabs_manager = tabs_manager
        self.commands = AIBuddyCommandManager(self)
        self.llm_status = None
        self.mcp_clients = {
            InProcessMCPClientNativeTools.ID: InProcessMCPClientNativeTools(session, settings_manager, OLLAMA_HOST),
            InProcessMCPClientCustomTools.ID: InProcessMCPClientCustomTools(session, settings_manager, OLLAMA_HOST)
        }
        # self.db.mcp_client_mod
        # self.db.llm_use_tools
        self.conversations: list[DebugConversation] = self.db.conversations

        # Check LLM status once at initialization in a background thread
        threading.Thread(target=self._initial_status_check, daemon=True).start()

    def _initial_status_check(self):
        """Check LLM status once in a background thread."""
        try:
            # Run the async function in a new event loop
            loop = asyncio.new_event_loop()
            self.llm_status = loop.run_until_complete(self.async_check_llm_active())
            loop.close()
        except Exception as e:
            self.llm_status = f"Error checking LLM status: {e}"

    def get_conversations(self):
        return self.conversations

    def show_request_form(self):
        return self.mk_input()

    def register_components(self):
        return [
            (self.mk_input(), "TOP"),
            (self.mk_response(), "BOTTOM"),
        ]

    async def make_async_ai_request(self, request):
        # Start a new conversation on the first request, otherwise continue the latest one
        if len(self.conversations) == 0:
            start = datetime.now()
            conversation = DebugConversation(f"{self.get_user_id()}{start.timestamp()}",
                                             int(start.timestamp()),
                                             request)
            self.conversations.append(conversation)
        else:
            conversation = self.conversations[-1]

        debug = DebugRequest(request)
        conversation.requests.append(debug)

        mcp_client = self.mcp_clients[self.db.mcp_client_mod]
        start = datetime.now()  # datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        response = await mcp_client.generate_with_mcp_context(debug, request, self.db.llm_use_tools)

        request_metadata = DebugRequestMetadata(f"{self.get_user_id()}{start.timestamp()}",
                                                self.get_user_id(),
                                                int(start.timestamp()),
                                                int(datetime.now().timestamp()),
                                                mcp_client.model,
                                                self.db.mcp_client_mod,
                                                self.db.llm_use_tools,
                                                self._debug_available_tools(mcp_client.available_tools))
        debug.metadata = request_metadata

        self.db.conversations = self.conversations

        return self.mk_response(response)

    def reset_ai_request(self):
        return self.mk_input(True), self.mk_response(None, True)

    def get_llm_status(self):
        return self.mk_status(), self.mk_input(True)

    def mk_status(self):
        # Non-blocking, simply returns UI based on the current status
        if self.llm_status is None:
            return Span(cls="loading loading-infinity loading-sm", id=f"s_{self._id}", **self.commands.get_llm_status())
        elif self.llm_status is not True:
            return mk_tooltip(mk_icon(icon_brain_warning), self.llm_status, id=f"s_{self._id}")
        else:
            return mk_icon(icon_brain_ok, id=f"s_{self._id}")

    def mk_input(self, oob=False):
        if self.llm_status is True:
            return self._mk_input_ok(oob)
        else:
            return Div(id=f"q_{self._id}", hx_swap_oob="true" if oob else None)

    def mk_response(self, response=None, oob=False):
        if self.llm_status is True:
            return self._mk_response_ok(response, oob=oob)
        else:
            return Div(id=f"a_{self._id}",
                       hx_swap_oob="true")

    def _mk_input_ok(self, oob=False):
        return Div(
            Label(
                mk_icon(icon_brain_ok),
                Input(
                    name="q",
                    placeholder="Ask me anything...",
                    tabindex="1",
                    **self.commands.make_request()
                ),
                cls="input",
            ),
            mk_icon(icon_dismiss_regular, cls="ml-2", tooltip="Reset question", **self.commands.reset_request()),
            cls="flex",
            id=f"q_{self._id}",
            hx_swap_oob="true" if oob else None,
        )

    def _mk_response_ok(self, response=None, oob=False):
        return Div(response or "Your response will appear here.",
                   cls="w-full px-4 py-2 border border-gray-300 rounded-md",
                   id=f"a_{self._id}",
                   hx_swap_oob="true" if oob else None,
                   )

    @staticmethod
    def _debug_available_tools(available_tools):
        return [
            {"name": tool["name"],
             "parameters": tool["parameters"],
             "description": tool["description"]}
            for tool in available_tools.values()
        ]

    def __ft__(self):
        return Div(
            Div(cls="divider"),
            Div(
                mk_ellipsis("AI Buddy", cls="text-sm font-medium mb-1 mr-3"),
                self.mk_status(),
                cls="flex items-center"),
            Script(f"bindAIBuddy('{self._id}')"),
            id=f"{self._id}"
        )

    @staticmethod
    def create_component_id(session):
        return f"{AI_BUDDY_INSTANCE_ID}{session['user_id']}"

    @staticmethod
    async def async_check_llm_active():
        """Asynchronous LLM status check."""
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.post(
                    f"{OLLAMA_HOST}/api/pull",
                    json={"name": "mistral"}
                )
                response.raise_for_status()
                return True
        except Exception as e:
            return f"Error pulling Mistral model: {e}"

    def query_mistral(self, prompt):
        """Send a query to the Mistral model via Ollama API."""
        try:
            with httpx.Client(timeout=30.0) as client:
                response = client.post(
                    f"{OLLAMA_HOST}/api/generate",
                    json={
                        "model": "mistral",
                        "prompt": prompt,
                        "stream": False
                    }
                )
                response.raise_for_status()
                return response.json()
        except Exception as e:
            return {"error": str(e), "response": f"Error querying model: {e}"}

    async def async_query_mistral(self, prompt):
        """Asynchronous version of query_mistral."""
        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                response = await client.post(
                    f"{OLLAMA_HOST}/api/generate",
                    json={
                        "model": "mistral",
                        "prompt": prompt,
                        "stream": False
                    }
                )
                response.raise_for_status()
                return response.json()
        except Exception as e:
            return {"error": str(e), "response": f"Error querying model: {e}"}
src/components/aibuddy/components/__init__.py (Normal file, +0)
src/components/aibuddy/constants.py (Normal file, +8)
@@ -0,0 +1,8 @@
AI_BUDDY_INSTANCE_ID = "__AIBuddy__"
ROUTE_ROOT = "/ai"


class Routes:
    Request = "/request"
    ResetRequest = "/reset-request"
    LlmStatus = "/llm-status"
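`commands.py` builds each endpoint as `ROUTE_ROOT` plus a `Routes` entry, and `assets/AIBuddy.js` hardcodes one of the results. A quick sanity sketch of the composed paths:

```python
from components.aibuddy.constants import ROUTE_ROOT, Routes

assert ROUTE_ROOT + Routes.Request == "/ai/request"  # also hardcoded in assets/AIBuddy.js
assert ROUTE_ROOT + Routes.ResetRequest == "/ai/reset-request"
assert ROUTE_ROOT + Routes.LlmStatus == "/ai/llm-status"
```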
src/components/aibuddy/settings.py (Normal file, +16)
@@ -0,0 +1,16 @@
import dataclasses

from ai.debug_lmm import DebugConversation
from ai.mcp_client import InProcessMCPClientCustomTools
from core.settings_objects import BaseSettingObj

AI_BUDDY_SETTINGS_ENTRY = "AIBuddy"


@dataclasses.dataclass
class AIBuddySettings(BaseSettingObj):
    __ENTRY_NAME__ = AI_BUDDY_SETTINGS_ENTRY

    mcp_client_mod: str = InProcessMCPClientCustomTools.ID
    llm_use_tools: bool = True
    conversations: list[DebugConversation] = dataclasses.field(default_factory=list)