45 Commits

Author SHA1 Message Date
3bd503d4d2 Added callback support 2025-08-29 19:17:24 +02:00
292a477298 Added Hooks implementation 2025-08-28 23:24:28 +02:00
eb8d6a99a2 Refactored JsonViewer 2025-08-27 23:06:12 +02:00
765c715d63 Added jsonviewer as an autonomous component 2025-08-27 21:38:47 +02:00
e90e7b01dd I can display a single workflow item 2025-08-26 22:47:16 +02:00
fe5668fbed Working version of EntrySelector 2025-08-26 21:58:42 +02:00
63058ef4a9 Added traceability 2025-08-25 23:20:10 +02:00
957a92f903 Added Workflow entry selector 2025-08-24 00:00:09 +02:00
33970c9c97 Added lazy loading when showing grid to improve performance 2025-08-23 22:26:14 +02:00
8eca1da3ca Updated .idea for Sheerka. Updated requirements for Arpeggio 2025-08-09 16:57:53 +02:00
97a5989390 removed .idea folder 2025-08-09 16:44:54 +02:00
e73709c859 Adding integration tests for workflow engine 2025-08-06 18:22:16 +02:00
f0d98d23ff I can finally chain Processor calls 2025-08-05 19:45:25 +02:00
64e7c44a7d Added other Jira resources 2025-08-05 10:42:26 +02:00
3a1870a160 Refactored properties component 2025-08-05 00:18:31 +02:00
c2fcfbb2ab I can save form (but user interaction is broken) 2025-08-04 18:36:28 +02:00
e74639c042 Properties Details correctly reacts on user interaction 2025-08-04 16:40:48 +02:00
badc2e28b0 Dialog box at the bottom. Property layout fully operational 2025-08-04 10:34:11 +02:00
4ac3eb2dfa The new layout is ok. We can work on the content 2025-08-03 18:11:26 +02:00
2bd998fe69 Refactoring Properties component 2025-08-03 11:10:17 +02:00
c694f42c07 The correct tab is shown on undo - redo 2025-08-02 15:27:11 +02:00
6949bb2814 Fixed WorkflowDesignerState 2025-08-02 01:22:45 +02:00
14f079d5f9 Added undo functionalities for all commands 2025-08-01 19:25:34 +02:00
3ca23449e4 Fixed unit tests 2025-08-01 18:55:40 +02:00
a6f765c624 Updated gitignore to remove .idea files 2025-08-01 09:16:03 +02:00
43e7dd5f00 Remove .idea directory from tracking 2025-08-01 09:15:17 +02:00
37c91d0d5d Another implementation of undo/redo 2025-07-31 22:54:09 +02:00
Kodjo Sossouvi 72f5f30da6 Working on undo redo 2025-07-25 17:22:18 +02:00
fb82365980 Improving functionalities and adding unit tests 2025-07-25 09:50:36 +02:00
aa8aa8f58c Working on undo redo capabilities 2025-07-24 23:41:27 +02:00
1ceddfac7c Fixed double shift implementation. Added implicit renaming 2025-07-24 21:42:13 +02:00
Kodjo Sossouvi 34f959812b Added extra fields management for Jira search 2025-07-24 17:52:42 +02:00
Kodjo Sossouvi 48b5c057f0 Fixed Jira icon color. Jira input details is now dynamic 2025-07-23 17:25:53 +02:00
Kodjo Sossouvi 0d7b94a045 Added Jira connectivity testing. Added alert management in AdminForm 2025-07-22 18:23:01 +02:00
e793aeda95 I can have different types of JIRA query 2025-07-21 23:21:40 +02:00
a0cf5aff0c Working implementation of DefaultDataPresenter 2025-07-20 19:17:55 +02:00
d064a553dd Adding Jira DataProcessor 2025-07-14 16:57:14 +02:00
6f17f6ee1f Fixed unit tests 2025-07-14 15:22:56 +02:00
ed793995fb Fixed unit tests 2025-07-13 18:11:17 +02:00
f3deeaefd1 Adding unit tests to WorkflowPlayer.py 2025-07-13 12:23:25 +02:00
fdf05edec3 Adding unit tests to WorkflowPlayer.py 2025-07-12 18:40:36 +02:00
bdd954b243 Improving error management 2025-07-12 17:45:30 +02:00
2754312141 Adding visual return when error 2025-07-12 09:52:56 +02:00
d0f7536fa0 Adding error management 2025-07-11 19:03:08 +02:00
2b288348e2 Adding error management 2025-07-11 18:34:04 +02:00
99 changed files with 6406 additions and 514 deletions

6
.gitignore vendored

@@ -11,6 +11,10 @@ tests/TestDBEngineRoot
.sesskey
tools.db
.mytools_db
.idea/MyManagingTools.iml
.idea/misc.xml
.idea_bak
**/*.prof
# Created by .ignore support plugin (hsz.mobi)
### Python template
@@ -196,4 +200,4 @@ fabric.properties
.idea/caches/build_file_checksums.ser
# idea folder, uncomment if you don't need it
# .idea
# .idea

3
.idea/.gitignore generated vendored

@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml


@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (MyManagingTools)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>


@@ -1,5 +0,0 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
</state>
</component>


@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyInitNewSignatureInspection" enabled="false" level="WARNING" enabled_by_default="false" />
</profile>
</component>

7
.idea/misc.xml generated

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 (MyManagingTools)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (MyManagingTools)" project-jdk-type="Python SDK" />
</project>

2
.idea/vcs.xml generated

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
<mapping directory="" vcs="Git" />
</component>
</project>


@@ -18,7 +18,9 @@ clean:
rm -rf Untitled*.ipynb
rm -rf .ipynb_checkpoints
rm -rf src/tools.db
rm -rf src/*.out
rm -rf src/*.prof
find . -name '.sesskey' -exec rm -rf {} +
find . -name '.pytest_cache' -exec rm -rf {} +
find . -name '__pycache__' -exec rm -rf {} +
find . -name 'debug.txt' -exec rm -rf {}
find . -name 'debug.txt' -exec rm -rf {} +


@@ -14,7 +14,7 @@ python main.py
```shell
docker-compose up -d
```
The application will be accessible on port 8000 (or whatever port you configured).
The application will be accessible on port 8001 (unless the docker-compose file has been changed).
2. **Initialize the Mistral model** (first run):
```shell
@@ -34,4 +34,11 @@ docker-compose down
1. **Rebuild**:
```shell
docker-compose build
```
# Profiling
```shell
cd src
python -m cProfile -o profile.out main.py
snakeviz profile.out # 'pip install snakeviz' if snakeviz is not installed
```


@@ -2,6 +2,7 @@ annotated-types==0.7.0
anyio==4.6.0
apsw==3.50.2.0
apswutils==0.1.0
Arpeggio==2.0.2
beautifulsoup4==4.12.3
certifi==2024.8.30
charset-normalizer==3.4.2
@@ -31,6 +32,7 @@ pydantic-settings==2.9.1
pydantic_core==2.33.2
Pygments==2.19.1
pytest==8.3.3
pytest-mock==3.14.1
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-fasthtml==0.12.21


@@ -8,6 +8,7 @@
--datagrid-resize-zindex: 1;
--color-splitter: color-mix(in oklab, var(--color-base-content) 50%, #0000);
--color-splitter-active: color-mix(in oklab, var(--color-base-content) 50%, #ffff);
--color-btn-hover: color-mix(in oklab, var(--btn-color, var(--color-base-200)), #000 7%);
}
.mmt-tooltip-container {
@@ -36,6 +37,19 @@
transition: opacity 0.2s ease, visibility 0s linear 0.2s;
}
.mmt-btn {
user-select: none;
border-style: solid;
}
.mmt-btn:hover {
background-color: var(--color-btn-hover);
}
.mmt-btn-disabled {
opacity: 0.5;
/*cursor: not-allowed;*/
}
/* When parent is hovered, show the child elements with this class */
*:hover > .mmt-visible-on-hover {
@@ -63,6 +77,8 @@
width: 24px;
min-width: 24px;
height: 24px;
margin-top: auto;
margin-bottom: auto;
}
.icon-24 svg {


@@ -1,6 +1,11 @@
const tooltipElementId = "mmt-app"
function bindTooltipsWithDelegation() {
// To display a tooltip, the 'data-tooltip' attribute is mandatory => it contains the tooltip text
// Then either:
// the class 'truncate' to show the tooltip only when the text is truncated
// the class 'mmt-tooltip' to force the display
const elementId = tooltipElementId
console.debug("bindTooltips on element " + elementId);
@@ -20,11 +25,19 @@ function bindTooltipsWithDelegation() {
// Add a single mouseenter and mouseleave listener to the parent element
element.addEventListener("mouseenter", (event) => {
//console.debug("Entering element", event.target)
const cell = event.target.closest("[data-tooltip]");
if (!cell) return;
if (!cell) {
// console.debug(" No 'data-tooltip' attribute found. Stopping.");
return;
}
const no_tooltip = element.hasAttribute("mmt-no-tooltip");
if (no_tooltip) return;
if (no_tooltip) {
// console.debug(" Attribute 'mmt-no-tooltip' found. Cancelling.");
return;
}
const content = cell.querySelector(".truncate") || cell;
const isOverflowing = content.scrollWidth > content.clientWidth;


@@ -1,4 +1,4 @@
from core.utils import get_user_id
from core.utils import get_user_id, get_unique_id
class BaseComponent:
@@ -51,3 +51,12 @@ class BaseComponentSingleton(BaseComponent):
@classmethod
def create_component_id(cls, session):
return f"{cls.COMPONENT_INSTANCE_ID}{session['user_id']}"
class BaseComponentMultipleInstance(BaseComponent):
COMPONENT_INSTANCE_ID = None
@classmethod
def create_component_id(cls, session):
component_id = cls.COMPONENT_INSTANCE_ID or cls.__name__
return get_unique_id(f"{component_id}{session['user_id']}")


@@ -48,4 +48,28 @@ def post(session, _id: str, content: str):
def post(session, _id: str):
logger.debug(f"Entering {Routes.ImportHolidays} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.import_holidays()
return instance.import_holidays()
@rt(Routes.ConfigureJira)
def get(session, _id: str, boundaries: str):
logger.debug(f"Entering {Routes.ConfigureJira} - GET with args {debug_session(session)}, {_id=}, {boundaries=}")
instance = InstanceManager.get(session, _id)
return instance.show_configure_jira(json.loads(boundaries) if boundaries else None)
@rt(Routes.ConfigureJira)
def post(session, _id: str, args: dict):
logger.debug(f"Entering {Routes.ConfigureJira} - POST with args {debug_session(session)}, {_id=}, {args=}")
instance = InstanceManager.get(session, _id)
return instance.update_jira_settings(args)
@rt(Routes.ConfigureJiraCancel)
def post(session, _id: str):
logger.debug(f"Entering {Routes.ConfigureJiraCancel} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.cancel_jira_settings()
@rt(Routes.ConfigureJiraTest)
def post(session, _id: str, args: dict):
logger.debug(f"Entering {Routes.ConfigureJiraTest} with args {debug_session(session)}, {_id=}, {args=}")
instance = InstanceManager.get(session, _id)
return instance.test_jira_settings(args)


@@ -23,9 +23,16 @@ class AiBuddySettingsEntry:
self.ollama_port = port
@dataclass()
class JiraSettingsEntry:
user_name: str = ""
api_token: str = ""
@dataclass
class AdminSettings:
ai_buddy: AiBuddySettingsEntry = field(default_factory=AiBuddySettingsEntry)
jira: JiraSettingsEntry = field(default_factory=JiraSettingsEntry)
class AdminDbManager:
@@ -37,3 +44,8 @@ class AdminDbManager:
ADMIN_SETTINGS_ENTRY,
AdminSettings,
"ai_buddy")
self.jira = NestedSettingsManager(session,
settings_manager,
ADMIN_SETTINGS_ENTRY,
AdminSettings,
"jira")


@@ -0,0 +1,31 @@
from fastcore.basics import NotStr
icon_jira = NotStr("""<svg name="jira" viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg">
<defs>
<style>.a{fill:none;stroke:currentColor;stroke-linecap:round;stroke-linejoin:round;stroke-width:2}</style>
</defs>
<path class="a" d="M5.5,22.9722h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,25.0278,42.5h0V22.9722Z"/>
<path class="a" d="M14.2361,14.2361h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556a8.7361,8.7361,0,0,0,8.7361,8.7361h0V14.2361Z"/>
<path class="a" d="M22.9722,5.5h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,42.5,25.0278h0V5.5Z"/>
</svg>""")
icon_msg_info = NotStr("""<svg name="info" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" class="h-6 w-6 shrink-0 stroke-current">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path>
</svg>
""")
icon_msg_success = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
</svg>
""")
icon_msg_warning = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
</svg>
""")
icon_msg_error = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
</svg>
""")


@@ -38,7 +38,39 @@ class AdminCommandManager(BaseCommandManager):
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
}
def show_configure_jira(self):
return {
"hx-get": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
}
def save_configure_jira(self):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}"}}',
# The form adds the rest
}
def cancel_configure_jira(self):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJiraCancel}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}"}}',
}
def test_jira(self):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJiraTest}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}"}}',
}
class ImportHolidaysCommandManager(BaseCommandManager):
def __init__(self, owner):


@@ -4,15 +4,17 @@ from ai.mcp_client import MPC_CLIENTS_IDS
from ai.mcp_tools import MCPServerTools
from components.BaseComponent import BaseComponent
from components.admin.admin_db_manager import AdminDbManager
from components.admin.assets.icons import icon_jira
from components.admin.commands import AdminCommandManager
from components.admin.components.AdminForm import AdminFormItem, AdminFormType, AdminForm
from components.admin.components.AdminForm import AdminFormItem, AdminFormType, AdminForm, AdminButton, AdminMessageType
from components.admin.components.ImportHolidays import ImportHolidays
from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID
from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_JIRA_INSTANCE_ID
from components.aibuddy.assets.icons import icon_brain_ok
from components.hoildays.assets.icons import icon_holidays
from components.tabs.components.MyTabs import MyTabs
from components_helpers import mk_ellipsis, mk_icon
from core.instance_manager import InstanceManager
from core.jira import Jira
class Admin(BaseComponent):
@@ -35,7 +37,7 @@ class Admin(BaseComponent):
hooks = {
"on_ok": self.commands.save_ai_buddy(),
"on_cancel": self.commands.cancel_ai_buddy(),
"ok_title": "Apply"
"ok_title": "Apply",
}
form = InstanceManager.get(self._session,
AdminForm.create_component_id(self._session, prefix=self._id),
@@ -59,8 +61,33 @@ class Admin(BaseComponent):
return self._add_tab(ADMIN_AI_BUDDY_INSTANCE_ID, "Admin - Import Holidays", form)
def show_configure_jira(self, boundaries):
fields = [
AdminFormItem('user_name', "Email", "Email used to connect to JIRA.", AdminFormType.TEXT),
AdminFormItem("api_token", "API Key", "API Key to connect to JIRA.", AdminFormType.TEXT),
]
hooks = {
"on_ok": self.commands.save_configure_jira(),
"on_cancel": self.commands.cancel_configure_jira(),
"ok_title": "Apply",
"extra_buttons": [AdminButton("Test", self.commands.test_jira)]
}
form = InstanceManager.get(self._session,
AdminForm.create_component_id(self._session, prefix=self._id),
AdminForm,
owner=self,
title="Jira Configuration Page",
obj=self.db.jira,
form_fields=fields,
hooks=hooks,
key=ADMIN_JIRA_INSTANCE_ID,
boundaries=boundaries
)
return self._add_tab(ADMIN_JIRA_INSTANCE_ID, "Admin - Jira Configuration", form)
def update_ai_buddy_settings(self, values: dict):
values = self.manage_lists(values)
values = AdminForm.get_fields_values(values)
self.db.ai_buddy.update(values, ignore_missing=True)
return self.tabs_manager.render()
@@ -69,6 +96,27 @@ class Admin(BaseComponent):
self.tabs_manager.remove_tab(tab_id)
return self.tabs_manager.render()
def update_jira_settings(self, values: dict):
values = AdminForm.get_fields_values(values)
self.db.jira.update(values, ignore_missing=True)
return self.tabs_manager.render()
def cancel_jira_settings(self):
tab_id = self.tabs_manager.get_tab_id(ADMIN_JIRA_INSTANCE_ID)
self.tabs_manager.remove_tab(tab_id)
return self.tabs_manager.render()
def test_jira_settings(self, values: dict):
values = AdminForm.get_fields_values(values)
jira = Jira(values["user_name"], values["api_token"])
form = self.tabs_manager.get_tab_content_by_key(ADMIN_JIRA_INSTANCE_ID)
res = jira.test()
if res.status_code == 200:
form.set_message("Success !", AdminMessageType.SUCCESS)
else:
form.set_message(f"Error {res.status_code} - {res.text}", AdminMessageType.ERROR)
return self.tabs_manager.render()
def __ft__(self):
return Div(
Div(cls="divider"),
@@ -84,6 +132,11 @@ class Admin(BaseComponent):
mk_ellipsis("holidays", cls="text-sm", **self.commands.show_import_holidays()),
cls="flex p-0 min-h-0 truncate",
),
Div(
mk_icon(icon_jira, can_select=False),
mk_ellipsis("jira", cls="text-sm", **self.commands.show_configure_jira()),
cls="flex p-0 min-h-0 truncate",
),
#
# cls=""),
# Script(f"bindAdmin('{self._id}')"),
@@ -97,40 +150,3 @@ class Admin(BaseComponent):
@staticmethod
def create_component_id(session):
return f"{ADMIN_INSTANCE_ID}{session['user_id']}"
@staticmethod
def manage_lists(data_dict):
"""
Processes a dictionary of key-value pairs to reorganize keys based on specific
criteria. If a key ends with its corresponding string value, the method extracts
the prefix of the key (the portion of the key before the value) and groups the
value under this prefix in a list. Otherwise, the original key-value pair is
preserved in the resulting dictionary.
:param data_dict: Dictionary where each key is a string and its corresponding
value can be of any type.
:type data_dict: dict
:return: A dictionary where the keys have been categorized into groups
based on whether they end with the same string value, reorganized into
lists, while preserving other key-value pairs as they are.
:rtype: dict
"""
result_dict = {}
for key, value in data_dict.items():
# Check if the value is a string and the key ends with the value
if isinstance(value, str) and key.endswith(value):
# Find the beginning part of the key (before the value)
prefix = key.replace(value, '').rstrip('_')
# Add the value to the list under the prefix key
if prefix not in result_dict:
result_dict[prefix] = []
result_dict[prefix].append(value)
else:
result_dict[key] = value
return result_dict


@@ -1,10 +1,12 @@
from dataclasses import dataclass
from typing import Any
from typing import Any, Callable
from fasthtml.components import *
from assets.icons import icon_error
from components.BaseComponent import BaseComponent
from components_helpers import apply_boundaries, mk_dialog_buttons, safe_get_dialog_buttons_parameters
from components.admin.assets.icons import icon_msg_success, icon_msg_info, icon_msg_error, icon_msg_warning
from components_helpers import apply_boundaries, mk_dialog_buttons, safe_get_dialog_buttons_parameters, mk_icon
from core.utils import get_unique_id
@@ -18,6 +20,14 @@ class AdminFormType:
TEXTAREA = "textarea"
class AdminMessageType:
NONE = "none"
SUCCESS = "success"
ERROR = "error"
INFO = "info"
WARNING = "warning"
@dataclass
class AdminFormItem:
name: str
@@ -27,6 +37,12 @@ class AdminFormItem:
possible_values: list[str] = None
@dataclass
class AdminButton:
title: str
on_click: Callable = None
class AdminForm(BaseComponent):
def __init__(self, session, _id, owner, title: str, obj: Any, form_fields: list[AdminFormItem], hooks=None, key=None,
boundaries=None):
@@ -38,6 +54,21 @@ class AdminForm(BaseComponent):
self.title = title
self.obj = obj
self.form_fields = form_fields
self.message = None
def set_message(self, message, msg_type=AdminMessageType.NONE):
if msg_type == AdminMessageType.NONE:
self.message = message
else:
if msg_type == AdminMessageType.SUCCESS:
icon = icon_msg_success
elif msg_type == AdminMessageType.ERROR:
icon = icon_msg_error
elif msg_type == AdminMessageType.WARNING:
icon = icon_msg_warning
else:
icon = icon_msg_info
self.message = Div(icon, Span(message), role=msg_type, cls=f"alert alert-{msg_type} mr-2")
def mk_input(self, item: AdminFormItem):
return Input(
@@ -62,7 +93,7 @@ class AdminForm(BaseComponent):
cls="checkbox checkbox-xs",
checked=value in current_values
),
cls="checkbox-item") for value in item.possible_values]
return Div(*checkbox_items, cls="adm-items-group")
@@ -95,9 +126,20 @@ class AdminForm(BaseComponent):
else:
return self.mk_input(item)
def mk_extra_buttons(self):
extra_buttons = self._hooks.get("extra_buttons", None)
if not extra_buttons:
return None
return Div(
*[Button(btn.title, cls="btn btn-ghost btn-sm", **btn.on_click()) for btn in extra_buttons],
cls="flex justify-end"
)
def __ft__(self):
return Form(
Fieldset(Legend(self.title, cls="fieldset-legend"),
Div(self.message),
*[
Div(
Label(item.title, cls="label"),
@@ -107,6 +149,7 @@ class AdminForm(BaseComponent):
for item in self.form_fields
],
self.mk_extra_buttons(),
mk_dialog_buttons(**safe_get_dialog_buttons_parameters(self._hooks)),
**apply_boundaries(self._boundaries),
cls="fieldset bg-base-200 border-base-300 rounded-box w-xs border p-4"
@@ -119,3 +162,40 @@ class AdminForm(BaseComponent):
suffix = get_unique_id()
return f"{prefix}{suffix}"
@staticmethod
def get_fields_values(data_dict):
"""
Processes a dictionary of key-value pairs to reorganize keys based on specific
criteria. If a key ends with its corresponding string value, the method extracts
the prefix of the key (the portion of the key before the value) and groups the
value under this prefix in a list. Otherwise, the original key-value pair is
preserved in the resulting dictionary.
:param data_dict: Dictionary where each key is a string and its corresponding
value can be of any type.
:type data_dict: dict
:return: A dictionary where the keys have been categorized into groups
based on whether they end with the same string value, reorganized into
lists, while preserving other key-value pairs as they are.
:rtype: dict
"""
result_dict = {}
for key, value in data_dict.items():
# Check if the value is a string and the key ends with the value
if isinstance(value, str) and key.endswith(value):
# Find the beginning part of the key (before the value)
prefix = key.replace(value, '').rstrip('_')
# Add the value to the list under the prefix key
if prefix not in result_dict:
result_dict[prefix] = []
result_dict[prefix].append(value)
else:
result_dict[key] = value
return result_dict
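A quick illustration of the grouping rule described in the docstring above (not part of the diff; the field names below are hypothetical): a key whose name ends with its own string value is grouped under the key's prefix, while ordinary pairs are passed through unchanged.

```python
# Sketch only, with made-up form field names:
AdminForm.get_fields_values({
    "servers_alpha": "alpha",  # key ends with its value -> grouped under "servers"
    "servers_beta": "beta",    # same prefix -> appended to the same list
    "user_name": "admin",      # ordinary pair -> kept as-is
})
# -> {"servers": ["alpha", "beta"], "user_name": "admin"}
```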


@@ -1,6 +1,7 @@
ADMIN_INSTANCE_ID = "__Admin__"
ADMIN_AI_BUDDY_INSTANCE_ID = "__AdminAIBuddy__"
ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID = "__AdminImportHolidays__"
ADMIN_JIRA_INSTANCE_ID = "__AdminJira__"
ROUTE_ROOT = "/admin"
ADMIN_SETTINGS_ENTRY = "Admin"
@@ -8,4 +9,7 @@ class Routes:
AiBuddy = "/ai-buddy"
AiBuddyCancel = "/ai-buddy-cancel"
ImportHolidays = "/import-holidays"
PasteHolidays = "/paste-holidays"
PasteHolidays = "/paste-holidays"
ConfigureJira = "/configure-jira"
ConfigureJiraCancel = "/configure-jira-cancel"
ConfigureJiraTest = "/configure-jira-test"


@@ -39,7 +39,9 @@ function bindAIBuddy(elementId) {
event.preventDefault();
makeAIRequest();
}
});
document.addEventListener('keyup', (event) => {
if (event.key === 'Shift') {
const currentTime = new Date().getTime();
if (currentTime - lastShiftPress <= doublePressDelay) {


@@ -136,3 +136,10 @@ def post(session, _id: str, state: str, args: str = None):
logger.debug(f"Entering on_state_changed with args {_id=}, {state=}, {args=}")
instance = InstanceManager.get(session, _id)
return instance.manage_state_changed(state, args)
@rt(Routes.GetPage)
def get(session, _id: str, page_index: int):
logger.debug(f"Entering {Routes.GetPage} with args {_id=}, {page_index=}")
instance = InstanceManager.get(session, _id)
return instance.mk_body_content_page(page_index)


@@ -1,6 +1,6 @@
function bindDatagrid(datagridId, allowColumnsReordering) {
bindScrollbars(datagridId);
makeResizable(datagridId)
manageScrollbars(datagridId, true);
makeResizable(datagridId);
}
function bindScrollbars(datagridId) {
@@ -21,7 +21,7 @@ function bindScrollbars(datagridId) {
const table = datagrid.querySelector(".dt2-table");
if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
console.error("Essential scrollbar or content elements are missing in the datagrid.");
console.error("Essential scrollbars or content elements are missing in the datagrid.");
return;
}
@@ -176,6 +176,224 @@ function bindScrollbars(datagridId) {
});
}
function manageScrollbars(datagridId, binding) {
console.debug("manageScrollbars on element " + datagridId + " with binding=" + binding);
const datagrid = document.getElementById(datagridId);
if (!datagrid) {
console.error(`Datagrid with id "${datagridId}" not found.`);
return;
}
const verticalScrollbar = datagrid.querySelector(".dt2-scrollbars-vertical");
const verticalWrapper = datagrid.querySelector(".dt2-scrollbars-vertical-wrapper");
const horizontalScrollbar = datagrid.querySelector(".dt2-scrollbars-horizontal");
const horizontalWrapper = datagrid.querySelector(".dt2-scrollbars-horizontal-wrapper");
const body = datagrid.querySelector(".dt2-body");
const table = datagrid.querySelector(".dt2-table");
if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
console.error("Essential scrollbars or content elements are missing in the datagrid.");
return;
}
const computeScrollbarVisibility = () => {
// Determine if the content is clipped
const isVerticalRequired = body.scrollHeight > body.clientHeight;
const isHorizontalRequired = table.scrollWidth > table.clientWidth;
// Show or hide the scrollbar wrappers
requestAnimationFrame(() => {
verticalWrapper.style.display = isVerticalRequired ? "block" : "none";
horizontalWrapper.style.display = isHorizontalRequired ? "block" : "none";
});
};
const computeScrollbarSize = () => {
// Vertical scrollbar height
const visibleHeight = body.clientHeight;
const totalHeight = body.scrollHeight;
const wrapperHeight = verticalWrapper.offsetHeight;
let scrollbarHeight = 0;
if (totalHeight > 0) {
scrollbarHeight = (visibleHeight / totalHeight) * wrapperHeight;
}
// Horizontal scrollbar width
const visibleWidth = table.clientWidth;
const totalWidth = table.scrollWidth;
const wrapperWidth = horizontalWrapper.offsetWidth;
let scrollbarWidth = 0;
if (totalWidth > 0) {
scrollbarWidth = (visibleWidth / totalWidth) * wrapperWidth;
}
requestAnimationFrame(() => {
verticalScrollbar.style.height = `${scrollbarHeight}px`;
horizontalScrollbar.style.width = `${scrollbarWidth}px`;
});
};
const updateVerticalScrollbarForMouseWheel = () => {
const maxScrollTop = body.scrollHeight - body.clientHeight;
const wrapperHeight = verticalWrapper.offsetHeight;
if (maxScrollTop > 0) {
const scrollRatio = wrapperHeight / body.scrollHeight;
verticalScrollbar.style.top = `${body.scrollTop * scrollRatio}px`;
}
};
if (binding) {
// Clean up existing managers if they exist
if (datagrid._managers) {
// Remove drag events
if (datagrid._managers.dragManager) {
verticalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.verticalMouseDown);
horizontalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.horizontalMouseDown);
document.removeEventListener("mousemove", datagrid._managers.dragManager.mouseMove);
document.removeEventListener("mouseup", datagrid._managers.dragManager.mouseUp);
}
// Remove wheel events
if (datagrid._managers.wheelManager) {
body.removeEventListener("wheel", datagrid._managers.wheelManager.handleWheelScrolling);
}
// Remove resize events
if (datagrid._managers.resizeManager) {
window.removeEventListener("resize", datagrid._managers.resizeManager.handleResize);
}
}
// Create managers
const dragManager = {
isDragging: false,
startY: 0,
startX: 0,
updateVerticalScrollbar: (deltaX, deltaY) => {
const wrapperHeight = verticalWrapper.offsetHeight;
const scrollbarHeight = verticalScrollbar.offsetHeight;
const maxScrollTop = body.scrollHeight - body.clientHeight;
const scrollRatio = maxScrollTop / (wrapperHeight - scrollbarHeight);
let newTop = parseFloat(verticalScrollbar.style.top || "0") + deltaY;
newTop = Math.max(0, Math.min(newTop, wrapperHeight - scrollbarHeight));
verticalScrollbar.style.top = `${newTop}px`;
body.scrollTop = newTop * scrollRatio;
},
updateHorizontalScrollbar: (deltaX, deltaY) => {
const wrapperWidth = horizontalWrapper.offsetWidth;
const scrollbarWidth = horizontalScrollbar.offsetWidth;
const maxScrollLeft = table.scrollWidth - table.clientWidth;
const scrollRatio = maxScrollLeft / (wrapperWidth - scrollbarWidth);
let newLeft = parseFloat(horizontalScrollbar.style.left || "0") + deltaX;
newLeft = Math.max(0, Math.min(newLeft, wrapperWidth - scrollbarWidth));
horizontalScrollbar.style.left = `${newLeft}px`;
table.scrollLeft = newLeft * scrollRatio;
},
verticalMouseDown: (e) => {
disableTooltip();
dragManager.isDragging = true;
dragManager.startY = e.clientY;
dragManager.startX = e.clientX;
document.body.style.userSelect = "none";
verticalScrollbar.classList.add("dt2-dragging");
},
horizontalMouseDown: (e) => {
disableTooltip();
dragManager.isDragging = true;
dragManager.startY = e.clientY;
dragManager.startX = e.clientX;
document.body.style.userSelect = "none";
horizontalScrollbar.classList.add("dt2-dragging");
},
mouseMove: (e) => {
if (dragManager.isDragging) {
const deltaY = e.clientY - dragManager.startY;
const deltaX = e.clientX - dragManager.startX;
// Determine which scrollbar is being dragged
if (verticalScrollbar.classList.contains("dt2-dragging")) {
dragManager.updateVerticalScrollbar(deltaX, deltaY);
} else if (horizontalScrollbar.classList.contains("dt2-dragging")) {
dragManager.updateHorizontalScrollbar(deltaX, deltaY);
}
// Reset start points for next update
dragManager.startY = e.clientY;
dragManager.startX = e.clientX;
}
},
mouseUp: () => {
dragManager.isDragging = false;
document.body.style.userSelect = "";
verticalScrollbar.classList.remove("dt2-dragging");
horizontalScrollbar.classList.remove("dt2-dragging");
enableTooltip();
}
};
const wheelManager = {
handleWheelScrolling: (event) => {
const deltaX = event.deltaX;
const deltaY = event.deltaY;
// Scroll the body and table content
body.scrollTop += deltaY; // Vertical scrolling
table.scrollLeft += deltaX; // Horizontal scrolling
// Update the vertical scrollbar position
updateVerticalScrollbarForMouseWheel();
// Prevent default behavior to fully manage the scroll
event.preventDefault();
}
};
const resizeManager = {
handleResize: () => {
computeScrollbarVisibility();
computeScrollbarSize();
updateVerticalScrollbarForMouseWheel();
}
};
// Store managers on datagrid for cleanup
datagrid._managers = {
dragManager,
wheelManager,
resizeManager
};
// Bind events
verticalScrollbar.addEventListener("mousedown", dragManager.verticalMouseDown);
horizontalScrollbar.addEventListener("mousedown", dragManager.horizontalMouseDown);
document.addEventListener("mousemove", dragManager.mouseMove);
document.addEventListener("mouseup", dragManager.mouseUp);
body.addEventListener("wheel", wheelManager.handleWheelScrolling, {passive: false});
window.addEventListener("resize", resizeManager.handleResize);
}
// Always execute computations
computeScrollbarVisibility();
computeScrollbarSize();
}
function makeResizable(datagridId) {
console.debug("makeResizable on element " + datagridId);
@@ -494,4 +712,5 @@ function onAfterSettle(datagridId, event) {
if (response.includes("hx-on::before-settle")) {
bindDatagrid(datagridId)
}
}
}


@@ -1,4 +1,5 @@
import copy
import html
import logging
from io import BytesIO
from typing import Literal, Any
@@ -20,9 +21,10 @@ from components.datagrid_new.db_management import DataGridDbManager
from components.datagrid_new.settings import DataGridRowState, DataGridColumnState, \
DataGridFooterConf, DataGridState, DataGridSettings, DatagridView
from components_helpers import mk_icon, mk_ellipsis
from core.fasthtml_helper import MyDiv, mk_my_ellipsis, MySpan, mk_my_icon
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager
from core.utils import get_unique_id, make_safe_id
from core.utils import get_unique_id, make_safe_id, timed
logger = logging.getLogger("DataGrid")
@@ -59,6 +61,8 @@ class DataGrid(BaseComponent):
self._state: DataGridState = self._db.load_state()
self._settings: DataGridSettings = grid_settings or self._db.load_settings()
self._df: DataFrame | None = self._db.load_dataframe()
self._fast_access = self._init_fast_access(self._df)
self._total_rows = len(self._df) if self._df is not None else 0
# update boundaries if possible
self.set_boundaries(boundaries)
@@ -118,14 +122,23 @@ class DataGrid(BaseComponent):
else:
return ColumnType.Text # Default to Text if no match
def _init_columns(_df):
columns = [DataGridColumnState(make_safe_id(col_id),
col_index,
col_id,
_get_column_type(self._df[make_safe_id(col_id)].dtype))
for col_index, col_id in enumerate(_df.columns)]
if self._state.row_index:
columns.insert(0, DataGridColumnState(make_safe_id(ROW_INDEX_ID), -1, " ", ColumnType.RowIndex))
return columns
self._df = df.copy()
self._df.columns = self._df.columns.map(make_safe_id) # make sure column names are trimmed
self._state.rows = [DataGridRowState(row_id) for row_id in self._df.index]
self._state.columns = [DataGridColumnState(make_safe_id(col_id),
col_index,
col_id,
_get_column_type(self._df[make_safe_id(col_id)].dtype))
for col_index, col_id in enumerate(df.columns)]
self._state.columns = _init_columns(df) # use df not self._df to keep the original title
self._fast_access = self._init_fast_access(self._df)
self._total_rows = len(self._df) if self._df is not None else 0
if save_state:
self._db.save_all(None, self._state, self._df)
@@ -205,6 +218,7 @@ class DataGrid(BaseComponent):
self._state.columns = new_columns_states
self._fast_access = self._init_fast_access(self._df)
self._views.recompute_need_save()
self._db.save_all(self._settings, self._state, self._df if new_column else None)
@@ -439,7 +453,7 @@ class DataGrid(BaseComponent):
_mk_keyboard_management(),
Div(
self.mk_table_header(),
self.mk_table_body(),
self.mk_table_body_page(),
self.mk_table_footer(),
cls="dt2-inner-table"),
cls="dt2-table",
@@ -479,20 +493,18 @@ class DataGrid(BaseComponent):
id=f"th_{self._id}"
)
def mk_table_body(self):
df = self._get_filtered_df()
def mk_table_body_page(self):
"""
This function is used to update the table body when the vertical scrollbar reaches the end
A new page is added when requested
"""
max_height = self._compute_body_max_height()
return Div(
*[Div(
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
cls="dt2-row",
data_row=f"{row_index}",
id=f"tr_{self._id}-{row_index}",
) for row_index in df.index],
*self.mk_body_content_page(0),
cls="dt2-body",
style=f"max-height:{max_height}px;",
id=f"tb_{self._id}"
id=f"tb_{self._id}",
)
def mk_table_footer(self):
@@ -507,34 +519,55 @@ class DataGrid(BaseComponent):
id=f"tf_{self._id}"
)
def mk_body_content_page(self, page_index: int):
df = self._get_filtered_df()
start = page_index * DATAGRID_PAGE_SIZE
end = start + DATAGRID_PAGE_SIZE
if self._total_rows > end:
last_row = df.index[end - 1]
else:
last_row = None
rows = [Div(
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
cls="dt2-row",
data_row=f"{row_index}",
id=f"tr_{self._id}-{row_index}",
**self.commands.get_page(page_index + 1) if row_index == last_row else {}
) for row_index in df.index[start:end]]
rows.append(Script(f"manageScrollbars('{self._id}', false);"), )
return rows
def mk_body_cell(self, col_pos, row_index, col_def: DataGridColumnState):
if not col_def.usable:
return None
if not col_def.visible:
return Div(cls="dt2-col-hidden")
return MyDiv(cls="dt2-col-hidden")
content = self.mk_body_cell_content(col_pos, row_index, col_def)
return Div(content,
data_col=col_def.col_id,
style=f"width:{col_def.width}px;",
cls="dt2-cell")
return MyDiv(content,
data_col=col_def.col_id,
style=f"width:{col_def.width}px;",
cls="dt2-cell")
def mk_body_cell_content(self, col_pos, row_index, col_def: DataGridColumnState):
def mk_bool(value):
return Div(mk_icon(icon_checked if value else icon_unchecked, can_select=False),
cls="dt2-cell-content-checkbox")
def mk_bool(_value):
return MyDiv(mk_my_icon(icon_checked if _value else icon_unchecked, can_select=False),
cls="dt2-cell-content-checkbox")
def mk_text(value):
return mk_ellipsis(value, cls="dt2-cell-content-text")
def mk_text(_value):
return mk_my_ellipsis(_value, cls="dt2-cell-content-text")
def mk_number(value):
return mk_ellipsis(value, cls="dt2-cell-content-number")
def mk_number(_value):
return mk_my_ellipsis(_value, cls="dt2-cell-content-number")
def process_cell_content(value):
value_str = str(value)
def process_cell_content(_value):
value_str = html.escape(str(_value))
if FILTER_INPUT_CID not in self._state.filtered or (
keyword := self._state.filtered[FILTER_INPUT_CID]) is None:
@@ -545,21 +578,22 @@ class DataGrid(BaseComponent):
return value_str
len_keyword = len(keyword)
res = [Span(value_str[:index])] if index > 0 else []
res += [Span(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
res += [Span(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
res = [MySpan(value_str[:index])] if index > 0 else []
res += [MySpan(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
res += [MySpan(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
return tuple(res)
column_type = col_def.type
value = self._fast_access[col_def.col_id][row_index]
if column_type == ColumnType.Bool:
content = mk_bool(self._df.iloc[row_index, col_def.col_index])
content = mk_bool(value)
elif column_type == ColumnType.Number:
content = mk_number(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
content = mk_number(process_cell_content(value))
elif column_type == ColumnType.RowIndex:
content = mk_number(row_index)
else:
content = mk_text(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
content = mk_text(process_cell_content(value))
return content
@@ -822,6 +856,31 @@ class DataGrid(BaseComponent):
return True
@staticmethod
def _init_fast_access(df):
"""
Generates a fast-access dictionary for a DataFrame.
This method converts the columns of the provided DataFrame into NumPy arrays
and stores them as values in a dictionary, using the column names as keys.
This allows for efficient access to the data stored in the DataFrame.
Args:
df (DataFrame): The input pandas DataFrame whose columns are to be converted
into a dictionary of NumPy arrays.
Returns:
dict: A dictionary where the keys are the column names of the input DataFrame
and the values are the corresponding column values as NumPy arrays.
"""
if df is None:
return {}
res = {col: df[col].to_numpy() for col in df.columns}
res[ROW_INDEX_ID] = df.index.to_numpy()
return res
@timed
def __ft__(self):
return Div(
Div(
@@ -844,7 +903,7 @@ class DataGrid(BaseComponent):
@staticmethod
def new(session, data, index=None):
datagrid = DataGrid(session, DataGrid.create_component_id(session))
#dataframe = DataFrame(data, index=index)
# dataframe = DataFrame(data, index=index)
dataframe = DataFrame(data)
datagrid.init_from_dataframe(dataframe)
return datagrid


@@ -91,12 +91,21 @@ class DataGridCommandManager(BaseCommandManager):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.OnClick}",
"hx-target": f"#tsm_{self._id}",
"hx-trigger" : "click",
"hx-trigger": "click",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{_id: "{self._id}", cell_id:getCellId(event), modifier:getClickModifier(event), boundaries: getCellBoundaries(event)}}',
"hx-on::before-request": f'validateOnClickRequest("{self._id}", event)',
}
def get_page(self, page_index=0):
return {
"hx-get": f"{ROUTE_ROOT}{Routes.GetPage}",
"hx-target": f"#tb_{self._id}",
"hx-swap": "beforeend",
"hx-vals": f'{{"_id": "{self._id}", "page_index": "{page_index}"}}',
"hx-trigger": f"intersect root:#tb_{self._id} once",
}
def _get_hide_show_columns_attrs(self, mode, col_defs: list, new_value, cls=""):
str_col_names = ", ".join(f"'{col_def.title}'" for col_def in col_defs)
tooltip_msg = f"{mode} column{'s' if len(col_defs) > 1 else ''} {str_col_names}"
@@ -109,38 +118,6 @@ class DataGridCommandManager(BaseCommandManager):
"data_tooltip": tooltip_msg,
"cls": self.merge_class(cls, "mmt-tooltip")
}
#
# @staticmethod
# def merge(*items):
# """
# Merges multiple dictionaries into a single dictionary by combining their key-value pairs.
# If a key exists in multiple dictionaries and its value is a string, the values are concatenated.
# If the key's value is not a string, an error is raised.
#
# :param items: dictionaries to be merged. If all items are None, None is returned.
# :return: A single dictionary containing the merged key-value pairs from all input dictionaries.
# :raises NotImplementedError: If a key's value is not a string and exists in multiple input dictionaries.
# """
# if all(item is None for item in items):
# return None
#
# res = {}
# for item in [item for item in items if item is not None]:
#
# for key, value in item.items():
# if not key in res:
# res[key] = value
# else:
# if isinstance(res[key], str):
# res[key] += " " + value
# else:
# raise NotImplementedError("")
#
# return res
#
# @staticmethod
# def merge_class(cls1, cls2):
# return (cls1 + " " + cls2) if cls2 else cls1
class FilterAllCommands(BaseCommandManager):
@@ -165,4 +142,4 @@ class FilterAllCommands(BaseCommandManager):
"hx_vals": f'{{"_id": "{self._id}", "col_id":"{FILTER_INPUT_CID}"}}',
"data_tooltip": "Reset filter",
"cls": self.merge_class(cls, "mmt-tooltip"),
}
}


@@ -17,6 +17,9 @@ CONTAINER_HEIGHT = "container_height"
DATAGRID_STATE_FOOTER = "footer"
DATAGRID_PAGE_SIZE = 50
ROW_INDEX_ID = "__row_index__"
class Routes:
Filter = "/filter" # request the filtering in the grid
@@ -33,6 +36,7 @@ class Routes:
UpdateView = "/update_view"
ShowFooterMenu = "/show_footer_menu"
UpdateState = "/update_state"
GetPage = "/page"
class ColumnType(Enum):
@@ -44,11 +48,13 @@ class ColumnType(Enum):
Choice = "Choice"
List = "List"
class ViewType(Enum):
Table = "Table"
Chart = "Chart"
Form = "Form"
class FooterAggregation(Enum):
Sum = "Sum"
Mean = "Mean"
@@ -59,4 +65,4 @@ class FooterAggregation(Enum):
FilteredMean = "FilteredMean"
FilteredMin = "FilteredMin"
FilteredMax = "FilteredMax"
FilteredCount = "FilteredCount"
FilteredCount = "FilteredCount"


@@ -36,7 +36,7 @@ class DataGridDbManager:
self._settings_manager.save(self._session, self._get_db_entry(), {})
def _get_db_entry(self):
return f"{DATAGRID_DB_ENTRY}_{self._key}"
return make_safe_id(f"{DATAGRID_DB_ENTRY}_{self._key}")
@staticmethod
def _key_as_string(key):


@@ -69,6 +69,7 @@ class DataGridSettings:
class DataGridState:
sidebar_visible: bool = False
selected_view: str = None
row_index: bool = False
columns: list[DataGridColumnState] = dataclasses.field(default_factory=list)
rows: list[DataGridRowState] = dataclasses.field(default_factory=list) # only the rows that have a specific state
footers: list[DataGridFooterConf] = dataclasses.field(default_factory=list)


@@ -62,7 +62,7 @@ class JsonViewerHelper:
class JsonViewer(BaseComponent):
def __init__(self, session, _id, owner, user_id, data, hooks=None, key=None, boundaries=None):
super().__init__(session, _id)
self._key = key
self._key = key # for comparison between two jsonviewer components
self._owner = owner # debugger component
self.user_id = user_id
self.data = data
@@ -88,6 +88,10 @@ class JsonViewer(BaseComponent):
self._helper = JsonViewerHelper()
def set_data(self, data):
self.data = data
self.node = self._create_node(None, data)
def set_node_folding(self, node_id, folding):
if folding == self._folding_mode:
self._nodes_to_track.remove(node_id)
@@ -311,8 +315,6 @@ class JsonViewer(BaseComponent):
def __hash__(self):
return hash(self._key) if self._key is not None else super().__hash__()
@staticmethod
def add_quotes(value: str):
if '"' in value and "'" in value:


@@ -11,6 +11,7 @@ from components.drawerlayout.assets.icons import icon_panel_contract_regular, ic
from components.drawerlayout.constants import DRAWER_LAYOUT_INSTANCE_ID
from components.repositories.components.Repositories import Repositories
from components.tabs.components.MyTabs import MyTabs
from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.Workflows import Workflows
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager
@@ -31,6 +32,7 @@ class DrawerLayout(BaseComponent):
self._ai_buddy = self._create_component(AIBuddy)
self._admin = self._create_component(Admin)
self._applications = self._create_component(Applications)
self._undo_redo = self._create_component(UndoRedo)
self.top_components = self._get_sub_components("TOP", [self._ai_buddy])
self.bottom_components = self._get_sub_components("BOTTOM", [self._ai_buddy])
@@ -53,12 +55,16 @@ class DrawerLayout(BaseComponent):
name="sidebar"
),
Div(
Label(
Input(type="checkbox",
onclick=f"document.getElementById('sidebar_{self._id}').classList.toggle('collapsed');"),
icon_panel_contract_regular,
icon_panel_expand_regular,
cls="swap",
Div(
Label(
Input(type="checkbox",
onclick=f"document.getElementById('sidebar_{self._id}').classList.toggle('collapsed');"),
icon_panel_contract_regular,
icon_panel_expand_regular,
cls="swap mr-4",
),
self._undo_redo,
cls="flex"
),
Div(*[component for component in self.top_components], name="top", cls='dl-top'),
Div(self._tabs, id=f"page_{self._id}", name="page", cls='dl-page'),


@@ -0,0 +1,26 @@
import logging
from fasthtml.fastapp import fast_app
from components.entryselector.constants import Routes
from core.instance_manager import debug_session, InstanceManager
logger = logging.getLogger("EntrySelectorApp")
repositories_app, rt = fast_app()
@rt(Routes.Select)
def get(session, _id: str, entry: str):
logger.debug(f"Entering {Routes.Select} with args {debug_session(session)}, {_id=}, {entry=}")
instance = InstanceManager.get(session, _id)
to_update = instance.select_entry(entry)
res = [instance]
if to_update is None:
return instance
if isinstance(to_update, (list, tuple)):
res.extend(to_update)
else:
res.append(to_update)
return tuple(res)



@@ -0,0 +1,20 @@
.es-container {
overflow-x: auto;
white-space: nowrap;
}
.es-entry {
border: 2px solid var(--color-base-300);
padding: 2px;
cursor: pointer;
display: inline-block; /* Ensure entries align horizontally if needed */
}
.es-entry-selected {
border: 2px solid var(--color-primary);
}
.es-entry:hover {
background-color: var(--color-base-300);
}


@@ -0,0 +1,15 @@
from components.BaseCommandManager import BaseCommandManager
from components.entryselector.constants import Routes, ROUTE_ROOT
class EntrySelectorCommandManager(BaseCommandManager):
def __init__(self, owner):
super().__init__(owner)
def select_entry(self, entry):
return {
"hx-get": f"{ROUTE_ROOT}{Routes.Select}",
"hx-target": f"#{self._id}",
"hx-swap": "outerHTML",
"hx-vals": f'{{"_id": "{self._id}", "entry": "{entry}"}}',
}


@@ -0,0 +1,56 @@
import logging
from fasthtml.components import *
from components.BaseComponent import BaseComponentMultipleInstance
from components.entryselector.commands import EntrySelectorCommandManager
logger = logging.getLogger("EntrySelector")
class EntrySelector(BaseComponentMultipleInstance):
def __init__(self, session, _id, owner, data=None, hooks=None, key=None, boundaries=None):
super().__init__(session, _id)
self._key = key
self._owner = owner # debugger component
self.data = data
self.selected = None
self.hooks = hooks
self._boundaries = boundaries if boundaries else {"width": "300"}
self._commands = EntrySelectorCommandManager(self)
def set_data(self, data):
self.data = data
def set_selected(self, selected):
if selected is None:
self.selected = None
else:
self.selected = int(selected)
def set_boundaries(self, boundaries):
self._boundaries = boundaries
def select_entry(self, entry):
logger.debug(f"Selecting entry {entry}")
self.set_selected(entry)
if self.hooks is not None and (on_entry_selected := self.hooks.get("on_entry_selected", None)) is not None:
return on_entry_selected(entry)
else:
return None
def _mk_content(self):
if not self.data:
return [Div("no entry")]
return [Div(index,
**self._commands.select_entry(index),
cls=f"es-entry {'es-entry-selected' if index == self.selected else ''}")
for index in range(self.data)]
def __ft__(self):
return Div(
*self._mk_content(),
cls="flex es-container",
id=f"{self._id}",
)


@@ -0,0 +1,5 @@
ROUTE_ROOT = "/es" # for EntrySelector
class Routes:
Select = "/select"


@@ -0,0 +1,18 @@
import logging
from fasthtml.fastapp import fast_app
from components.jsonviewer.constants import Routes
from core.instance_manager import debug_session, InstanceManager
jsonviwer_app, rt = fast_app()
logger = logging.getLogger("JsonViewer")
@rt(Routes.Fold)
def post(session, _id: str, node_id: str, folding: str):
logger.debug(f"Entering {Routes.Fold} with args {debug_session(session)}, {_id=}, {node_id=}, {folding=}")
instance = InstanceManager.get(session, _id)
instance.set_node_folding(node_id, folding)
return instance.render_node(node_id)


@@ -0,0 +1,449 @@
# JsonViewer Hooks System - Technical Documentation
## Overview
The JsonViewer Hooks System provides a flexible, event-driven mechanism to customize the behavior and rendering of JSON nodes. Using a fluent builder pattern, developers can define conditions and actions that trigger during specific events in the JsonViewer lifecycle.
## Core Concepts
### Hook Architecture
A **Hook** consists of three components:
- **Event Type**: When the hook should trigger (`on_render`, `on_click`, etc.)
- **Conditions**: What criteria must be met for the hook to execute
- **Executor**: The function that runs when conditions are met
### HookContext
The `HookContext` object provides rich information about the current node being processed:
```python
class HookContext:
key: Any # The key of the current node
node: Any # The node object itself
helper: Any # JsonViewer helper utilities
jsonviewer: Any # Reference to the parent JsonViewer instance
json_path: str # Full JSON path (e.g., "users.0.name")
parent_node: Any # Reference to the parent node
metadata: dict # Additional metadata storage
```
**Utility Methods:**
- `get_node_type()`: Returns the string representation of the node type
- `get_value()`: Gets the actual value from the node
- `is_leaf_node()`: Checks if the node has no children
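As a brief illustration, an executor can combine these helpers. This is a sketch only; `Span` is used as in the other examples of this document, and the CSS class names are invented:

```python
def describe_node(context):
    # Sketch: an executor built on the HookContext helpers listed above.
    kind = context.get_node_type()  # string representation of the node's type
    if context.is_leaf_node():
        # Leaf node: render its actual value
        return Span(f"{kind}: {context.get_value()}", cls="leaf-node")
    # Branch node: show where it sits in the document
    return Span(f"{kind} at {context.json_path}", cls="branch-node")
```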
## HookBuilder API
### Creating a Hook
Use the `HookBuilder` class with method chaining to create hooks:
```python
hook = (HookBuilder()
.on_render()
.when_long_text(100)
.execute(my_custom_renderer))
```
### Event Types
#### `on_render()`
Triggers during node rendering, allowing custom rendering logic.
```python
def custom_text_renderer(context):
value = context.get_value()
return Span(f"Custom: {value}", cls="custom-text")
text_hook = (HookBuilder()
.on_render()
.when_type(str)
.execute(custom_text_renderer))
```
#### `on_click()`
Triggers when a node is clicked.
```python
def handle_click(context):
print(f"Clicked on: {context.json_path}")
return None # No rendering change
click_hook = (HookBuilder()
.on_click()
.when_editable()
.requires_modification()
.execute(handle_click))
```
#### `on_hover()` / `on_focus()`
Triggers on hover or focus events respectively.
```python
def show_tooltip(context):
return Div(f"Path: {context.json_path}", cls="tooltip")
hover_hook = (HookBuilder()
.on_hover()
.when_type(str)
.execute(show_tooltip))
```
## Conditions
Conditions determine when a hook should execute. Multiple conditions can be chained, and all must be satisfied.
### `when_type(target_type)`
Matches nodes with values of a specific type.
```python
# Hook for string values only
string_hook = (HookBuilder()
.on_render()
.when_type(str)
.execute(string_formatter))
# Hook for numeric values
number_hook = (HookBuilder()
.on_render()
.when_type((int, float)) # Accepts tuple of types
.execute(number_formatter))
```
### `when_key(key_pattern)`
Matches nodes based on their key.
```python
# Exact key match
email_hook = (HookBuilder()
.on_render()
.when_key("email")
.execute(email_formatter))
# Function-based key matching
def is_id_key(key):
return str(key).endswith("_id")
id_hook = (HookBuilder()
.on_render()
.when_key(is_id_key)
.execute(id_formatter))
```
### `when_value(target_value=None, predicate=None)`
Matches nodes based on their actual value.
**Exact value matching:**
```python
# Highlight error status
error_hook = (HookBuilder()
.on_render()
.when_value("ERROR")
.execute(lambda ctx: Span(ctx.get_value(), cls="error-status")))
# Special handling for null values
null_hook = (HookBuilder()
.on_render()
.when_value(None)
.execute(lambda ctx: Span("N/A", cls="null-value")))
```
**Predicate-based matching:**
```python
# URLs as clickable links
url_hook = (HookBuilder()
.on_render()
.when_value(predicate=lambda x: isinstance(x, str) and x.startswith("http"))
.execute(lambda ctx: A(ctx.get_value(), href=ctx.get_value(), target="_blank")))
# Large numbers formatting
large_number_hook = (HookBuilder()
.on_render()
.when_value(predicate=lambda x: isinstance(x, (int, float)) and x > 1000)
.execute(lambda ctx: Span(f"{ctx.get_value():,}", cls="large-number")))
```
### `when_path(path_pattern)`
Matches nodes based on their JSON path using regex.
```python
# Match all user names
user_name_hook = (HookBuilder()
.on_render()
.when_path(r"users\.\d+\.name")
.execute(user_name_formatter))
# Match any nested configuration
config_hook = (HookBuilder()
.on_render()
.when_path(r".*\.config\..*")
.execute(config_formatter))
```
### `when_long_text(threshold=100)`
Matches string values longer than the specified threshold.
```python
def text_truncator(context):
value = context.get_value()
truncated = value[:100] + "..."
return Div(
Span(truncated, cls="truncated-text"),
Button("Show more", cls="expand-btn"),
cls="long-text-container"
)
long_text_hook = (HookBuilder()
.on_render()
.when_long_text(100)
.execute(text_truncator))
```
### `when_editable(editable_paths=None, editable_types=None)`
Matches nodes that should be editable.
```python
def inline_editor(context):
value = context.get_value()
return Input(
value=str(value),
type="text" if isinstance(value, str) else "number",
cls="inline-editor",
**{"data-path": context.json_path}
)
editable_hook = (HookBuilder()
.on_click()
.when_editable(
editable_paths=["user.name", "user.email"],
editable_types=[str, int, float]
)
.requires_modification()
.execute(inline_editor))
```
### `when_custom(condition)`
Use custom condition objects or callable predicates for complex logic.
The `when_custom()` method accepts either:
- **Condition instances**: Objects that inherit from the `Condition` base class
- **Callable predicates**: Functions that take a `HookContext` parameter and return a boolean
When a callable is provided, it's automatically wrapped in a `PredicateCondition` class internally.
```python
class BusinessLogicCondition(Condition):
def evaluate(self, context):
# Complex business logic here
return (context.key == "status" and
context.get_value() in ["pending", "processing"])
custom_hook = (HookBuilder()
.on_render()
.when_custom(BusinessLogicCondition())
.execute(status_renderer))
```
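The callable form mentioned above can be passed directly; a minimal sketch that reuses `status_renderer` from the previous example and relies on the automatic `PredicateCondition` wrapping:

```python
# The lambda receives the HookContext and returns a boolean.
pending_hook = (HookBuilder()
                .on_render()
                .when_custom(lambda ctx: ctx.key == "status"
                             and ctx.get_value() in ["pending", "processing"])
                .execute(status_renderer))
```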
## Combining Conditions
### Multiple Conditions (AND Logic)
Chain multiple conditions - all must be satisfied:
```python
complex_hook = (HookBuilder()
.on_render()
.when_type(str)
.when_key("description")
.when_long_text(50)
.execute(description_formatter))
```
### Composite Conditions
Use `when_all()` and `when_any()` for explicit logic:
```python
# AND logic
strict_hook = (HookBuilder()
.on_render()
.when_all([
WhenType(str),
WhenLongText(100),
WhenKey("content")
])
.execute(content_formatter))
# OR logic
flexible_hook = (HookBuilder()
.on_render()
.when_any([
WhenKey("title"),
WhenKey("name"),
WhenKey("label")
])
.execute(title_formatter))
```
## State Modification
Use `requires_modification()` to indicate that the hook will modify the application state:
```python
def save_edit(context):
new_value = get_new_value_from_ui() # Implementation specific
# Update the actual data
context.jsonviewer.update_value(context.json_path, new_value)
return success_indicator()
edit_hook = (HookBuilder()
.on_click()
.when_editable()
.requires_modification()
.execute(save_edit))
```
## Complete Examples
### Example 1: Enhanced Text Display
```python
def enhanced_text_renderer(context):
value = context.get_value()
# Truncate long text
if len(value) > 100:
display_value = value[:100] + "..."
tooltip = value # Full text as tooltip
else:
display_value = value
tooltip = None
return Span(
display_value,
cls="enhanced-text",
title=tooltip,
**{"data-full-text": value}
)
text_hook = (HookBuilder()
.on_render()
.when_type(str)
.when_value(predicate=lambda x: len(x) > 20)
.execute(enhanced_text_renderer))
```
### Example 2: Interactive Email Fields
```python
def email_renderer(context):
email = context.get_value()
return Div(
A(f"mailto:{email}", href=f"mailto:{email}", cls="email-link"),
Button("Copy", cls="copy-btn", **{"data-clipboard": email}),
cls="email-container"
)
email_hook = (HookBuilder()
.on_render()
.when_key("email")
.when_value(predicate=lambda x: "@" in str(x))
.execute(email_renderer))
```
### Example 3: Status Badge System
```python
def status_badge(context):
status = context.get_value().lower()
badge_classes = {
"active": "badge-success",
"pending": "badge-warning",
"error": "badge-danger",
"inactive": "badge-secondary"
}
css_class = badge_classes.get(status, "badge-default")
return Span(
status.title(),
cls=f"status-badge {css_class}"
)
status_hook = (HookBuilder()
.on_render()
.when_key("status")
.when_value(predicate=lambda x: str(x).lower() in ["active", "pending", "error", "inactive"])
.execute(status_badge))
```
## Integration with JsonViewer
### Adding Hooks to JsonViewer
```python
# Create your hooks
hooks = [
text_hook,
email_hook,
status_hook
]
# Initialize JsonViewer with hooks
viewer = JsonViewer(
session=session,
_id="my-viewer",
data=my_json_data,
hooks=hooks
)
```
### Factory Functions
Create reusable hook factories for common patterns:
```python
def create_url_link_hook():
"""Factory for URL link rendering"""
def url_renderer(context):
url = context.get_value()
return A(url, href=url, target="_blank", cls="url-link")
return (HookBuilder()
.on_render()
.when_value(predicate=lambda x: isinstance(x, str) and x.startswith(("http://", "https://")))
.execute(url_renderer))
def create_currency_formatter_hook(currency_symbol="$"):
"""Factory for currency formatting"""
def currency_renderer(context):
amount = context.get_value()
return Span(f"{currency_symbol}{amount:,.2f}", cls="currency-amount")
return (HookBuilder()
.on_render()
.when_type((int, float))
.when_key(lambda k: "price" in str(k).lower() or "amount" in str(k).lower())
.execute(currency_renderer))
# Usage
hooks = [
create_url_link_hook(),
create_currency_formatter_hook("€"),
]
```
## Best Practices
1. **Specific Conditions**: Use the most specific conditions possible to avoid unintended matches
2. **Performance**: Avoid complex predicates in `when_value()` for large datasets
3. **Error Handling**: Include error handling in your executor functions (see the sketch after this list)
4. **Reusability**: Create factory functions for common hook patterns
5. **Testing**: Test hooks with various data structures to ensure they work as expected
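For point 3, a defensive executor can catch its own failures and return `None`, which makes the viewer fall back to its default rendering for that node. A minimal sketch, assuming a hypothetical convention where ISO timestamps live under keys ending in `_at`:
```python
from datetime import datetime

def safe_date_formatter(context):
    """Formats ISO timestamp strings; returns None on failure so default rendering is used."""
    try:
        parsed = datetime.fromisoformat(str(context.get_value()))
        return Span(parsed.strftime("%Y-%m-%d %H:%M"), cls="date-value")
    except (ValueError, TypeError):
        return None  # fall back to the viewer's default rendering

date_hook = (HookBuilder()
    .on_render()
    .when_key(lambda k: str(k).endswith("_at"))  # e.g. created_at, updated_at (assumed convention)
    .execute(safe_date_formatter))
```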
## Performance Considerations
- Hooks are evaluated in the order they are added to the JsonViewer
- All matching hooks run for an event, but only the first non-`None` result is used to render a node, so register specific hooks before general ones (see the sketch after this list)
- Use simple conditions when possible to minimize evaluation time
- Consider the size of your JSON data when using regex in `when_path()`
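Because the first non-`None` result wins, registration order doubles as a priority order. A short sketch reusing hooks defined earlier in this guide:
```python
# email_hook is more specific than string_hook, so register it first:
# "email" keys get the email rendering, other strings fall through to string_hook.
viewer = JsonViewer(
    session=session,
    _id="ordered-viewer",
    data=my_json_data,
    hooks=[email_hook, string_hook]
)
```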

View File

View File

@@ -0,0 +1,27 @@
from fastcore.basics import NotStr
# Fluent CaretRight20Filled
icon_collapsed = NotStr("""<svg name="collapsed" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
<g fill="none">
<path d="M7 14.204a1 1 0 0 0 1.628.778l4.723-3.815a1.5 1.5 0 0 0 0-2.334L8.628 5.02A1 1 0 0 0 7 5.797v8.407z" fill="currentColor">
</path>
</g>
</svg>""")
# Fluent CaretDown20Filled
icon_expanded = NotStr("""<svg name="expanded" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
<g fill="none">
<path d="M5.797 7a1 1 0 0 0-.778 1.628l3.814 4.723a1.5 1.5 0 0 0 2.334 0l3.815-4.723A1 1 0 0 0 14.204 7H5.797z" fill="currentColor">
</path>
</g>
</svg>""")
icon_class = NotStr("""
<svg name="expanded" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
<g fill="none" stroke="currentColor" stroke-width="1.5" >
<polygon points="5,2 2,8 8,8" />
<rect x="12" y="2" width="6" height="6"/>
<circle cx="5" cy="15" r="3" />
<polygon points="11.5,15 15,11.5 18.5,15 15,18.5" />
</g>
</svg>""")

View File

@@ -0,0 +1,23 @@
from components.jsonviewer.constants import ROUTE_ROOT, Routes
class JsonViewerCommands:
def __init__(self, owner):
self._owner = owner
self._id = owner.get_id()
def fold(self, node_id: str, folding: str):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.Fold}",
"hx-target": f"#{node_id}",
"hx-swap": "outerHTML",
"hx-vals": f'{{"_id": "{self._id}", "node_id": "{node_id}", "folding": "{folding}"}}',
}
def open_digest(self, user_id, digest):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.DbEngineDigest}",
"hx-target": f"#{self._owner.get_owner().tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'{{"_id": "{self._id}", "user_id": "{user_id}", "digest": "{digest}"}}',
}

View File

@@ -0,0 +1,544 @@
from dataclasses import dataclass, field
from typing import Any, Optional
from fasthtml.components import *
from pandas import DataFrame
from components.BaseComponent import BaseComponentMultipleInstance
from components.datagrid_new.components.DataGrid import DataGrid
from components.jsonviewer.assets.icons import icon_expanded, icon_collapsed, icon_class
from components.jsonviewer.commands import JsonViewerCommands
from components.jsonviewer.constants import NODES_KEYS_TO_NOT_EXPAND, NODE_OBJECT, INDENT_SIZE, MAX_TEXT_LENGTH
from components.jsonviewer.hooks import HookManager, HookContext, EventType, Hook
from components_helpers import apply_boundaries
from core.serializer import TAG_OBJECT
class FoldingMode:
COLLAPSE = "collapse"
EXPAND = "expand"
@dataclass
class Node:
value: Any
@dataclass
class ValueNode(Node):
hint: str = None
@dataclass
class ListNode(Node):
node_id: str
level: int
children: list[Node] = field(default_factory=list)
@dataclass
class DictNode(Node):
node_id: str
level: int
children: dict[str, Node] = field(default_factory=dict)
class NodeIdGenerator:
"""Manages unique node ID generation"""
def __init__(self, base_id: str):
self.base_id = base_id
self._counter = -1
def generate(self) -> str:
self._counter += 1
return f"{self.base_id}-{self._counter}"
def reset(self):
self._counter = -1
class FoldingManager:
"""Manages folding/unfolding state of nodes"""
    # A little explanation of how folding / unfolding works:
    # all the nodes follow the current folding mode... except the ones that don't!
    # self._folding_mode keeps the current mode (FoldingMode.COLLAPSE or FoldingMode.EXPAND)
    # self._nodes_to_track keeps track of the exceptions to that mode
    # The idea is to minimize memory usage
def __init__(self, default_mode: str = FoldingMode.COLLAPSE):
self._folding_mode = default_mode
self._nodes_to_track = set() # exceptions to the default mode
def set_folding_mode(self, mode: str):
"""Changes the global folding mode and clears exceptions"""
self._folding_mode = mode
self._nodes_to_track.clear()
def set_node_folding(self, node_id: str, folding: str):
"""Sets specific folding state for a node"""
if folding == self._folding_mode:
self._nodes_to_track.discard(node_id)
else:
self._nodes_to_track.add(node_id)
def must_expand(self, node: Node) -> Optional[bool]:
"""Determines if a node should be expanded"""
if not isinstance(node, (ListNode, DictNode)):
return None
if self._folding_mode == FoldingMode.COLLAPSE:
return node.node_id in self._nodes_to_track
else:
return node.node_id not in self._nodes_to_track
def get_folding_mode(self) -> str:
return self._folding_mode
def get_nodes_to_track(self) -> set[str]:
return self._nodes_to_track
class NodeFactory:
"""Factory for creating nodes from data with JSON path tracking"""
def __init__(self, id_generator: NodeIdGenerator, folding_manager: FoldingManager):
self.id_generator = id_generator
self.folding_manager = folding_manager
self._nodes_by_id = {}
self._node_paths = {} # node_id -> json_path mapping
self._node_parents = {} # node_id -> parent_node mapping
def create_node(self, key: Any, data: Any, level: int = 0, json_path: str = "", parent_node: Any = None) -> Node:
"""Creates appropriate node type based on data with path tracking"""
if isinstance(data, list):
return self._create_list_node(key, data, level, json_path, parent_node)
elif isinstance(data, dict):
return self._create_dict_node(key, data, level, json_path, parent_node)
else:
return self._create_value_node(key, data, json_path, parent_node)
def _create_list_node(self, key: Any, data: list, level: int, json_path: str, parent_node: Any) -> ListNode:
node_id = self.id_generator.generate()
if level <= 1 and key not in NODES_KEYS_TO_NOT_EXPAND:
self.folding_manager._nodes_to_track.add(node_id)
node = ListNode(data, node_id, level)
self._nodes_by_id[node_id] = (key, node)
self._node_paths[node_id] = json_path
self._node_parents[node_id] = parent_node
for index, item in enumerate(data):
child_path = f"{json_path}[{index}]" if json_path else f"[{index}]"
node.children.append(self.create_node(index, item, level + 1, child_path, node))
return node
def _create_dict_node(self, key: Any, data: dict, level: int, json_path: str, parent_node: Any) -> DictNode:
node_id = self.id_generator.generate()
if level <= 1 and key not in NODES_KEYS_TO_NOT_EXPAND:
self.folding_manager._nodes_to_track.add(node_id)
node = DictNode(data, node_id, level)
self._nodes_by_id[node_id] = (key, node)
self._node_paths[node_id] = json_path
self._node_parents[node_id] = parent_node
for child_key, value in data.items():
child_path = f"{json_path}.{child_key}" if json_path else str(child_key)
node.children[child_key] = self.create_node(child_key, value, level + 1, child_path, node)
return node
def _create_value_node(self, key: Any, data: Any, json_path: str, parent_node: Any) -> ValueNode:
hint = NODE_OBJECT if key == TAG_OBJECT else None
node = ValueNode(data, hint)
# Value nodes don't have node_id, but we can still track their path for hooks
return node
def get_node_by_id(self, node_id: str) -> tuple[Any, Node]:
return self._nodes_by_id[node_id]
def get_node_path(self, node_id: str) -> str:
return self._node_paths.get(node_id, "")
def get_node_parent(self, node_id: str) -> Any:
return self._node_parents.get(node_id, None)
def clear(self):
"""Clears all stored nodes"""
self._nodes_by_id.clear()
self._node_paths.clear()
self._node_parents.clear()
class JsonViewerHelper:
class_string = f"mmt-jsonviewer-string"
class_bool = f"mmt-jsonviewer-bool"
class_number = f"mmt-jsonviewer-number"
class_null = f"mmt-jsonviewer-null"
class_digest = f"mmt-jsonviewer-digest"
class_object = f"mmt-jsonviewer-object"
class_dataframe = f"mmt-jsonviewer-dataframe"
@staticmethod
def is_sha256(_value):
return (isinstance(_value, str) and
len(_value) == 64 and
all(c in '0123456789abcdefABCDEF' for c in _value))
@staticmethod
def add_quotes(value: str) -> str:
if '"' in value and "'" in value:
return f'"{value.replace("\"", "\\\"")}"'
elif '"' in value:
return f"'{value}'"
else:
return f'"{value}"'
class NodeRenderer:
"""Single class handling all node rendering with new hook system"""
def __init__(self, session,
jsonviewer_instance,
folding_manager: FoldingManager,
commands: JsonViewerCommands,
helper: JsonViewerHelper,
hook_manager: HookManager,
node_factory: NodeFactory):
self.session = session
self.jsonviewer = jsonviewer_instance
self.folding_manager = folding_manager
self.commands = commands
self.helper = helper
self.hook_manager = hook_manager
self.node_factory = node_factory
def render(self, key: Any, node: Node, json_path: str = "", parent_node: Any = None) -> Div:
"""Main rendering method for any node"""
must_expand = self.folding_manager.must_expand(node)
return Div(
self._create_folding_icon(node, must_expand),
Span(f'{key} : ') if key is not None else None,
self._render_value(key, node, must_expand, json_path, parent_node),
style=f"margin-left: {INDENT_SIZE}px;",
id=getattr(node, "node_id", None)
)
def _create_folding_icon(self, node: Node, must_expand: Optional[bool]) -> Optional[Span]:
"""Creates folding/unfolding icon"""
if must_expand is None:
return None
return Span(
icon_expanded if must_expand else icon_collapsed,
cls="icon-16-inline mmt-jsonviewer-folding",
style=f"margin-left: -{INDENT_SIZE}px;",
**self.commands.fold(
node.node_id,
FoldingMode.COLLAPSE if must_expand else FoldingMode.EXPAND
)
)
def _render_value(self, key: Any,
node: Node,
must_expand: Optional[bool],
json_path: str = "",
parent_node: Any = None):
"""Renders the value part of a node with new hook system"""
if must_expand is False:
return self._render_collapsed_indicator(node)
# Create hook context
context = HookContext(
key=key,
node=node,
helper=self.helper,
jsonviewer=self.jsonviewer,
json_path=json_path,
parent_node=parent_node
)
# Execute render hooks and check for results
hook_results = self.hook_manager.execute_hooks(EventType.RENDER, context)
# If any hook returned a result, use the first one
if hook_results:
# Filter out None results
valid_results = [result for result in hook_results if result is not None]
if valid_results:
return valid_results[0]
# No hooks matched or returned results, use default rendering
if isinstance(node, DictNode):
return self._render_dict_node(key, node)
elif isinstance(node, ListNode):
return self._render_list_node(key, node)
else:
return self._render_value_node(key, node)
def _render_collapsed_indicator(self, node: Node) -> Span:
"""Renders collapsed indicator"""
indicator = "[...]" if isinstance(node, ListNode) else "{...}"
return Span(
indicator,
id=node.node_id,
**self.commands.fold(node.node_id, FoldingMode.EXPAND)
)
def _render_dict_node(self, key: Any, node: DictNode) -> Span:
"""Renders dictionary node"""
children_elements = []
base_path = self.node_factory.get_node_path(node.node_id)
for child_key, child_node in node.children.items():
child_path = f"{base_path}.{child_key}" if base_path else str(child_key)
children_elements.append(self.render(child_key, child_node, child_path, node))
return Span(
"{",
*children_elements,
Div("}"),
id=node.node_id
)
def _render_list_node(self, key: Any, node: ListNode) -> Span:
"""Renders list node"""
if self._should_render_list_as_grid(key, node):
return self._render_list_as_grid(key, node)
else:
return self._render_list_as_array(key, node)
def _should_render_list_as_grid(self, key: Any, node: ListNode) -> bool:
"""Determines if list should be rendered as grid"""
if len(node.children) == 0:
return False
sample_node = node.children[0]
sample_value = sample_node.value
if sample_value is None:
return False
type_ = type(sample_value)
if type_ in (int, float, str, bool, list, dict, ValueNode):
return False
# Check if hooks handle this type (simplified check)
sample_context = HookContext(
key=key,
node=sample_node,
helper=self.helper,
jsonviewer=self.jsonviewer
)
hook_results = self.hook_manager.execute_hooks(EventType.RENDER, sample_context)
if hook_results and any(result is not None for result in hook_results):
return False
return all(type(item.value) == type_ for item in node.children)
def _render_list_as_grid(self, key: Any, node: ListNode) -> Span:
"""Renders list as grid"""
type_ = type(node.children[0].value)
icon = icon_class
str_value = type_.__name__.split(".")[-1]
data = [child.value.__dict__ for child in node.children]
df = DataFrame(data)
dg = DataGrid(self.session)
dg.init_from_dataframe(df)
return Span(
Span(
Span(icon, cls="icon-16-inline mr-1"),
Span(str_value),
cls="mmt-jsonviewer-object"
),
dg,
id=node.node_id
)
def _render_list_as_array(self, key: Any, node: ListNode) -> Span:
"""Renders list as array"""
children_elements = []
base_path = self.node_factory.get_node_path(node.node_id)
for index, child_node in enumerate(node.children):
child_path = f"{base_path}[{index}]" if base_path else f"[{index}]"
children_elements.append(self.render(index, child_node, child_path, node))
return Span(
"[",
*children_elements,
Div("]"),
)
def _render_value_node(self, key: Any, node: ValueNode) -> Span:
"""Renders value node"""
data_tooltip = None
htmx_params = {}
icon = None
if isinstance(node.value, bool): # order is important bool is an int in Python !
str_value = "true" if node.value else "false"
data_class = "bool"
elif isinstance(node.value, (int, float)):
str_value = str(node.value)
data_class = "number"
elif node.value is None:
str_value = "null"
data_class = "null"
elif self.helper.is_sha256(node.value):
str_value = str(node.value)
data_class = "digest"
htmx_params = self.commands.open_digest(self.jsonviewer.user_id, node.value)
elif node.hint == NODE_OBJECT:
icon = icon_class
str_value = node.value.split(".")[-1]
data_class = "object"
elif isinstance(node.value, DataFrame):
return self._render_dataframe_value(node.value)
else:
str_value, data_tooltip = self._format_string_value(node.value)
data_class = "string"
return self._create_value_span(str_value, data_class, icon, data_tooltip, htmx_params)
def _render_dataframe_value(self, dataframe: DataFrame) -> Any:
"""Renders DataFrame value"""
dg = DataGrid(self.session)
dg.init_from_dataframe(dataframe)
return dg
def _format_string_value(self, value: Any) -> tuple[str, Optional[str]]:
"""Formats string value with tooltip if too long"""
as_str = str(value)
if len(as_str) > MAX_TEXT_LENGTH:
return as_str[:MAX_TEXT_LENGTH] + "...", as_str
else:
return self.helper.add_quotes(as_str), None
def _create_value_span(self, str_value: str, data_class: str, icon: Any,
data_tooltip: Optional[str], htmx_params: dict) -> Span:
"""Creates the final Span element for a value"""
css_class = f"mmt-jsonviewer-{data_class}"
if data_tooltip:
css_class += " mmt-tooltip"
if icon:
return Span(
Span(icon, cls="icon-16-inline mr-1"),
Span(str_value, data_tooltip=data_tooltip, **htmx_params),
cls=css_class
)
return Span(str_value, cls=css_class, data_tooltip=data_tooltip, **htmx_params)
class JsonViewer(BaseComponentMultipleInstance):
"""Main JsonViewer component with new hook system"""
COMPONENT_INSTANCE_ID = "Jsonviewer"
def __init__(self, session, _id, data=None, hooks: list[Hook] = None, key=None, boundaries=None):
super().__init__(session, _id)
self._key = key
self.data = data
self._boundaries = boundaries if boundaries else {"height": "600"}
self._commands = JsonViewerCommands(self)
# Initialize hook system (transparent to user)
self._hook_manager = HookManager()
if hooks:
self._hook_manager.add_hooks(hooks)
# Initialize helper components
self._helper = JsonViewerHelper()
self._id_generator = NodeIdGenerator(_id)
self._folding_manager = FoldingManager()
self._node_factory = NodeFactory(self._id_generator, self._folding_manager)
# Initialize renderer with hook manager
self._node_renderer = NodeRenderer(
session, self, self._folding_manager,
self._commands, self._helper, self._hook_manager, self._node_factory
)
# Create the initial node tree
self.node = self._node_factory.create_node(None, data)
@property
def user_id(self) -> str:
"""Gets user_id from session or returns default"""
return getattr(self, '_user_id', getattr(self._session, 'user_id', 'default_user'))
def set_data(self, data):
"""Updates the data and recreates the node tree"""
self.data = data
self._id_generator.reset()
self._node_factory.clear()
self.node = self._node_factory.create_node(None, data)
def add_hook(self, hook: Hook):
"""Adds a single hook to the viewer"""
self._hook_manager.add_hook(hook)
def add_hooks(self, hooks: list[Hook]):
"""Adds multiple hooks to the viewer"""
self._hook_manager.add_hooks(hooks)
def clear_hooks(self):
"""Removes all hooks from the viewer"""
self._hook_manager.clear_hooks()
def set_node_folding(self, node_id: str, folding: str):
"""Sets folding state for a specific node"""
self._folding_manager.set_node_folding(node_id, folding)
def render_node(self, node_id: str):
"""Renders a specific node by ID"""
key, node = self._node_factory.get_node_by_id(node_id)
json_path = self._node_factory.get_node_path(node_id)
parent_node = self._node_factory.get_node_parent(node_id)
return self._node_renderer.render(key, node, json_path, parent_node)
def set_folding_mode(self, folding_mode: str):
"""Sets global folding mode"""
self._folding_manager.set_folding_mode(folding_mode)
def get_folding_mode(self) -> str:
"""Gets current folding mode"""
return self._folding_manager.get_folding_mode()
def open_digest(self, user_id: str, digest: str):
"""Opens digest - preserves original method"""
return self._owner.db_engine_headers(user_id, digest)
def __ft__(self):
"""FastHTML rendering method"""
if self.node is None:
return Div("No data to display", cls="mmt-jsonviewer", id=f"{self._id}")
return Div(
Div(
self._node_renderer.render(None, self.node, "", None),
id=f"{self._id}-root",
style="margin-left: 0px;"
),
cls="mmt-jsonviewer",
id=f"{self._id}",
**apply_boundaries(self._boundaries)
)
def __eq__(self, other):
"""Equality comparison"""
if type(other) is type(self):
return self._key is not None and self._key == other._key
return False
def __hash__(self):
"""Hash method"""
return hash(self._key) if self._key is not None else super().__hash__()

View File

@@ -0,0 +1,10 @@
ROUTE_ROOT = "/jsonviewer"
INDENT_SIZE = 20
MAX_TEXT_LENGTH = 50
NODE_OBJECT = "Object"
NODES_KEYS_TO_NOT_EXPAND = ["Dataframe", "__parent__"]
class Routes:
Fold = "/fold"

View File

@@ -0,0 +1,386 @@
from abc import ABC, abstractmethod
from enum import Enum
from typing import Any, Callable, Optional
class EventType(Enum):
RENDER = "render"
CLICK = "click"
HOVER = "hover"
FOCUS = "focus"
class DefaultEditableTypes:
pass
class HookContext:
"""Enhanced context object passed to hook executors"""
def __init__(self, key: Any, node: Any, helper: Any, jsonviewer: Any,
json_path: str = None, parent_node: Any = None):
self.key = key
self.node = node
self.helper = helper
self.jsonviewer = jsonviewer
self.json_path = json_path or ""
self.parent_node = parent_node
self.metadata = {}
def get_node_type(self) -> str:
"""Returns string representation of node type"""
if hasattr(self.node, '__class__'):
return self.node.__class__.__name__
return type(self.node.value).__name__ if hasattr(self.node, 'value') else "unknown"
def get_value(self) -> Any:
"""Gets the actual value from the node"""
return getattr(self.node, 'value', self.node)
def is_leaf_node(self) -> bool:
"""Checks if this is a leaf node (no children)"""
return not hasattr(self.node, 'children') or not self.node.children
class Condition(ABC):
"""Base class for all conditions"""
@abstractmethod
def evaluate(self, context: HookContext) -> bool:
pass
class WhenLongText(Condition):
"""Condition: text length > threshold"""
def __init__(self, threshold: int = 100):
self.threshold = threshold
def evaluate(self, context: HookContext) -> bool:
value = context.get_value()
return isinstance(value, str) and len(value) > self.threshold
class WhenEditable(Condition):
"""Condition: node is editable (configurable logic)"""
def __init__(self, editable_paths: list[str] = None, editable_types: list[type] = DefaultEditableTypes):
self.editable_paths = set(editable_paths or [])
if editable_types is None:
self.editable_types = set()
else:
self.editable_types = set([str, int, float, bool] if editable_types is DefaultEditableTypes else editable_types)
def evaluate(self, context: HookContext) -> bool:
# Check if path is in editable paths
if self.editable_paths and context.json_path in self.editable_paths:
return True
# Check if type is editable
value = context.get_value()
return type(value) in self.editable_types and context.is_leaf_node()
class WhenType(Condition):
"""Condition: node value is of specific type"""
def __init__(self, target_type: type):
self.target_type = target_type
def evaluate(self, context: HookContext) -> bool:
value = context.get_value()
return isinstance(value, self.target_type)
class WhenKey(Condition):
"""Condition: node key matches pattern"""
def __init__(self, key_pattern: Any):
self.key_pattern = key_pattern
def evaluate(self, context: HookContext) -> bool:
if callable(self.key_pattern):
return self.key_pattern(context.key)
return context.key == self.key_pattern
class WhenPath(Condition):
"""Condition: JSON path matches pattern"""
def __init__(self, path_pattern: str):
self.path_pattern = path_pattern
def evaluate(self, context: HookContext) -> bool:
import re
return bool(re.match(self.path_pattern, context.json_path))
class WhenValue(Condition):
"""Condition: node value matches specific value or predicate"""
def __init__(self, target_value: Any = None, predicate: Callable[[Any], bool] = None):
if target_value is not None and predicate is not None:
raise ValueError("Cannot specify both target_value and predicate")
if target_value is None and predicate is None:
raise ValueError("Must specify either target_value or predicate")
self.target_value = target_value
self.predicate = predicate
def evaluate(self, context: HookContext) -> bool:
value = context.get_value()
if self.predicate:
return self.predicate(value)
else:
return value == self.target_value
class CompositeCondition(Condition):
"""Allows combining conditions with AND/OR logic"""
def __init__(self, conditions: list[Condition], operator: str = "AND"):
self.conditions = conditions
self.operator = operator.upper()
def evaluate(self, context: HookContext) -> bool:
if not self.conditions:
return True
results = [condition.evaluate(context) for condition in self.conditions]
if self.operator == "AND":
return all(results)
elif self.operator == "OR":
return any(results)
else:
raise ValueError(f"Unknown operator: {self.operator}")
class Hook:
"""Represents a complete hook with event, conditions, and executor"""
def __init__(self, event_type: EventType, conditions: list[Condition],
executor: Callable, requires_modification: bool = False):
self.event_type = event_type
self.conditions = conditions
self.executor = executor
self.requires_modification = requires_modification
def matches(self, event_type: EventType, context: HookContext) -> bool:
"""Checks if this hook should be executed for given event and context"""
if self.event_type != event_type:
return False
return all(condition.evaluate(context) for condition in self.conditions)
def execute(self, context: HookContext) -> Any:
"""Executes the hook with given context"""
return self.executor(context)
class HookBuilder:
"""Builder class for creating hooks with fluent interface"""
def __init__(self):
self._event_type: Optional[EventType] = None
self._conditions: list[Condition] = []
self._executor: Optional[Callable] = None
self._requires_modification: bool = False
# Event specification methods
def on_render(self):
"""Hook will be triggered on render event"""
self._event_type = EventType.RENDER
return self
def on_click(self):
"""Hook will be triggered on click event"""
self._event_type = EventType.CLICK
return self
def on_hover(self):
"""Hook will be triggered on hover event"""
self._event_type = EventType.HOVER
return self
def on_focus(self):
"""Hook will be triggered on focus event"""
self._event_type = EventType.FOCUS
return self
# Condition methods
def when_long_text(self, threshold: int = 100):
"""Add condition: text length > threshold"""
self._conditions.append(WhenLongText(threshold))
return self
def when_editable(self, editable_paths: list[str] = None, editable_types: list[type] = None):
"""Add condition: node is editable"""
self._conditions.append(WhenEditable(editable_paths, editable_types))
return self
def when_type(self, target_type: type):
"""Add condition: node value is of specific type"""
self._conditions.append(WhenType(target_type))
return self
def when_key(self, key_pattern: Any):
"""Add condition: node key matches pattern"""
self._conditions.append(WhenKey(key_pattern))
return self
def when_path(self, path_pattern: str):
"""Add condition: JSON path matches pattern"""
self._conditions.append(WhenPath(path_pattern))
return self
def when_value(self, target_value: Any = None, predicate: Callable[[Any], bool] = None):
"""Add condition: node value matches specific value or predicate"""
self._conditions.append(WhenValue(target_value, predicate))
return self
def when_custom(self, condition):
"""Add custom condition (supports both Condition instances and predicate functions)."""
if callable(condition) and not isinstance(condition, Condition):
# Wrap the predicate function in a Condition class
class PredicateCondition(Condition):
def __init__(self, predicate):
self.predicate = predicate
def evaluate(self, context):
return self.predicate(context)
condition = PredicateCondition(condition) # Pass the function to the wrapper
elif not isinstance(condition, Condition):
raise ValueError("when_custom expects a Condition instance or a callable predicate.")
self._conditions.append(condition)
return self
def when_all(self, conditions: list[Condition]):
"""Add composite condition with AND logic"""
self._conditions.append(CompositeCondition(conditions, "AND"))
return self
def when_any(self, conditions: list[Condition]):
"""Add composite condition with OR logic"""
self._conditions.append(CompositeCondition(conditions, "OR"))
return self
# Modification flag
def requires_modification(self):
"""Indicates this hook will modify the state"""
self._requires_modification = True
return self
# Execution
def execute(self, executor: Callable) -> Hook:
"""Sets the executor function and builds the hook"""
if not self._event_type:
raise ValueError("Event type must be specified (use on_render(), on_click(), etc.)")
if not executor:
raise ValueError("Executor function must be provided")
self._executor = executor
return Hook(
event_type=self._event_type,
conditions=self._conditions,
executor=self._executor,
requires_modification=self._requires_modification
)
class HookManager:
"""Manages and executes hooks for JsonViewer"""
def __init__(self):
self.hooks: list[Hook] = []
def add_hook(self, hook: Hook):
"""Adds a hook to the manager"""
self.hooks.append(hook)
def add_hooks(self, hooks: list[Hook]):
"""Adds multiple hooks to the manager"""
self.hooks.extend(hooks)
def find_matching_hooks(self, event_type: EventType, context: HookContext) -> list[Hook]:
"""Finds all hooks that match the event and context"""
return [hook for hook in self.hooks if hook.matches(event_type, context)]
def execute_hooks(self, event_type: EventType, context: HookContext) -> list[Any]:
"""Executes all matching hooks and returns results"""
matching_hooks = self.find_matching_hooks(event_type, context)
results = []
for hook in matching_hooks:
try:
result = hook.execute(context)
results.append(result)
# If this hook requires modification, we might want to stop here
# or handle the modification differently
if hook.requires_modification:
# Could add callback to parent component here
pass
except Exception as e:
# Log error but continue with other hooks
print(f"Hook execution error: {e}")
continue
return results
def clear_hooks(self):
"""Removes all hooks"""
self.hooks.clear()
# Example usage and factory functions
def create_long_text_viewer_hook(threshold: int = 100) -> Hook:
"""Factory function for common long text viewer hook"""
def text_viewer_component(context: HookContext):
from fasthtml.components import Div, Span
value = context.get_value()
truncated = value[:threshold] + "..."
return Div(
Span(truncated, cls="text-truncated"),
Span("Click to expand", cls="expand-hint"),
cls="long-text-viewer"
)
return (HookBuilder()
.on_render()
.when_long_text(threshold)
.execute(text_viewer_component))
def create_inline_editor_hook(editable_paths: list[str] = None) -> Hook:
"""Factory function for common inline editor hook"""
def inline_editor_component(context: HookContext):
from fasthtml.components import Input, Div
value = context.get_value()
return Div(
Input(
value=str(value),
type="text" if isinstance(value, str) else "number",
cls="inline-editor"
),
cls="editable-field"
)
return (HookBuilder()
.on_click()
.when_editable(editable_paths)
.requires_modification()
.execute(inline_editor_component))

View File

@@ -1,3 +1,4 @@
from fasthtml.components import Html
from fasthtml.components import *
from fasthtml.xtend import Script

View File

@@ -20,7 +20,7 @@ def get(session):
@rt(Routes.AddRepository)
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries:str):
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries: str):
logger.debug(
f"Entering {Routes.AddRepository} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository=}, {table=}, {tab_boundaries=}")
instance = InstanceManager.get(session, _id) # Repository
@@ -34,8 +34,9 @@ def get(session, _id: str, repository_name: str):
@rt(Routes.AddTable)
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries:str):
logger.debug(f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries: str):
logger.debug(
f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
instance = InstanceManager.get(session, _id)
return instance.add_new_table(tab_id, form_id, repository_name, table_name, json.loads(tab_boundaries))
@@ -48,7 +49,8 @@ def put(session, _id: str, repository: str):
@rt(Routes.ShowTable)
def get(session, _id: str, repository: str, table: str, tab_boundaries:str):
logger.debug(f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
def get(session, _id: str, repository: str, table: str, tab_boundaries: str):
logger.debug(
f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
instance = InstanceManager.get(session, _id)
return instance.show_table(repository, table, json.loads(tab_boundaries))

View File

@@ -152,6 +152,14 @@ class MyTabs(BaseComponent):
def get_tab_content_by_key(self, key):
return self.tabs_by_key[key].content if key in self.tabs_by_key else None
def show_tab(self, tab_key, updated_content=None):
if updated_content:
tab_id = self._get_tab_id_from_tab_key(tab_key)
self.set_tab_content(tab_id, updated_content)
self.select_tab_by_key(tab_key)
return self.refresh()
def refresh(self):
return self.render(oob=True)
@@ -188,6 +196,13 @@ class MyTabs(BaseComponent):
active_tab = next(filter(lambda t: t.active, self.tabs), None)
return active_tab.content if active_tab else None
def get_active_tab_key(self):
active_tab = next(filter(lambda t: t.active, self.tabs), None)
return active_tab.key if active_tab else None
def _get_tab_id_from_tab_key(self, tab_key):
return self.tabs_by_key[tab_key].id if tab_key in self.tabs_by_key else None
@staticmethod
def create_component_id(session):
prefix = f"{MY_TABS_INSTANCE_ID}{session['user_id']}"

View File

@@ -0,0 +1,23 @@
import logging
from fasthtml.fastapp import fast_app
from components.undo_redo.constants import Routes
from core.instance_manager import debug_session, InstanceManager
logger = logging.getLogger("UndoRedoApp")
undo_redo_app, rt = fast_app()
@rt(Routes.Undo)
def post(session, _id: str):
logger.debug(f"Entering {Routes.Undo} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.undo()
@rt(Routes.Redo)
def post(session, _id: str):
logger.debug(f"Entering {Routes.Redo} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.redo()

View File

View File

@@ -0,0 +1,7 @@
from fastcore.basics import NotStr
# carbon Undo
icon_undo = NotStr("""<svg name="undo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 32 32"><path d="M20 10H7.815l3.587-3.586L10 5l-6 6l6 6l1.402-1.415L7.818 12H20a6 6 0 0 1 0 12h-8v2h8a8 8 0 0 0 0-16z" fill="currentColor"></path></svg>""")
# carbon Redo
icon_redo = NotStr("""<svg name="redo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 32 32"><path d="M12 10h12.185l-3.587-3.586L22 5l6 6l-6 6l-1.402-1.415L24.182 12H12a6 6 0 0 0 0 12h8v2h-8a8 8 0 0 1 0-16z" fill="currentColor"></path></svg>""")

View File

@@ -0,0 +1,25 @@
from components.BaseCommandManager import BaseCommandManager
from components.undo_redo.constants import ROUTE_ROOT, Routes
class UndoRedoCommandManager(BaseCommandManager):
def __init__(self, owner):
super().__init__(owner)
def undo(self):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.Undo}",
"hx-trigger": "click, keyup[ctrlKey&&key=='z'] from:body",
"hx-target": f"#{self._id}",
"hx-swap": "innerHTML",
"hx-vals": f'{{"_id": "{self._id}"}}',
}
def redo(self):
return {
"hx-post": f"{ROUTE_ROOT}{Routes.Redo}",
"hx_trigger": "click, keyup[ctrlKey&&key=='y'] from:body",
"hx-target": f"#{self._id}",
"hx-swap": "innerHTML",
"hx-vals": f'{{"_id": "{self._id}"}}',
}

View File

@@ -0,0 +1,165 @@
import logging
from dataclasses import dataclass
from fastcore.xml import FT
from fasthtml.components import *
from components.BaseComponent import BaseComponentSingleton
from components.undo_redo.assets.icons import icon_redo, icon_undo
from components.undo_redo.commands import UndoRedoCommandManager
from components.undo_redo.constants import UNDO_REDO_INSTANCE_ID, UndoRedoAttrs
from components_helpers import mk_icon, mk_tooltip
from core.settings_management import NoDefault
logger = logging.getLogger("UndoRedoApp")
@dataclass
class CommandHistory:
attrs: UndoRedoAttrs
tab_key: str | None
digest: str | None # digest to remember
    entry: str  # settings entry the snapshot applies to
    key: str  # key within the entry that was changed
    path: str  # path within the key if only one subitem needs to be updated
class UndoRedo(BaseComponentSingleton):
COMPONENT_INSTANCE_ID = UNDO_REDO_INSTANCE_ID
def __init__(self, session, _id, settings_manager=None, tabs_manager=None):
super().__init__(session, _id, settings_manager, tabs_manager)
self.index = -1
self.history = []
self._commands = UndoRedoCommandManager(self)
self._db_engine = settings_manager.get_db_engine()
def snapshot(self, undo_redo_attrs: UndoRedoAttrs, entry, key, path=None):
digest = self._settings_manager.get_digest(self._session, entry) # get the current digest (the last one)
active_tab_key = self.tabs_manager.get_active_tab_key()
# init the history if this is the first call
if len(self.history) == 0:
digest_history = self._settings_manager.history(self._session, entry, digest, 2)
command = CommandHistory(undo_redo_attrs,
active_tab_key,
digest_history[1] if len(digest_history) > 1 else None,
entry,
key,
path)
self.history.append(command)
self.index = 0
command = CommandHistory(undo_redo_attrs, active_tab_key, digest, entry, key, path)
        self.history = self.history[:self.index + 1]  # drop any redo entries beyond the current index
self.history.append(command)
self.index = len(self.history) - 1
def undo(self):
logger.debug(f"Undo command")
if self.index < 1:
logger.debug(f" No command to undo.")
return self
current = self.history[self.index]
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
previous = self.history[self.index - 1]
previous_state = self._settings_manager.load(self._session, None, digest=previous.digest)
# reapply the state
if previous_state is not NoDefault:
current_state[current.key] = previous_state[current.key]
else:
del current_state[current.key]
self._settings_manager.save(self._session, current.entry, current_state)
self.index -= 1
if current.attrs.on_undo is not None:
ret = current.attrs.on_undo()
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
ret = self.tabs_manager.show_tab(current.tab_key)
elif isinstance(ret, FT) and 'id' in ret.attrs:
ret.attrs["hx-swap-oob"] = "true"
return self, ret
else:
return self
def redo(self):
logger.debug(f"Redo command")
if self.index >= len(self.history) - 1:
logger.debug(f" No command to undo.")
return self
current = self.history[self.index]
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
next_ = self.history[self.index + 1]
next_state = self._settings_manager.load(self._session, None, digest=next_.digest)
# reapply the state
if current_state is not NoDefault:
current_state[current.key] = next_state[current.key]
else:
current_state = {current.key: next_state[current.key]}
self._settings_manager.save(self._session, current.entry, current_state)
self.index += 1
if current.attrs.on_redo is not None:
            ret = current.attrs.on_redo()
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
ret = self.tabs_manager.show_tab(current.tab_key)
elif isinstance(ret, FT) and 'id' in ret.attrs:
ret.attrs["hx-swap-oob"] = "true"
return self, ret
else:
return self
def refresh(self):
return self.__ft__(oob=True)
def __ft__(self, oob=False):
return Div(
self._mk_undo(),
self._mk_redo(),
id=self._id,
cls="flex",
hx_swap_oob="true" if oob else None
)
def _mk_undo(self):
if self._can_undo():
command = self.history[self.index]
return mk_tooltip(mk_icon(icon_undo,
size=24,
**self._commands.undo()),
f"Undo '{command.attrs.name}'.")
else:
return mk_tooltip(mk_icon(icon_undo,
size=24,
can_select=False,
cls="mmt-btn-disabled"),
"Nothing to undo.")
def _mk_redo(self):
if self._can_redo():
command = self.history[self.index + 1]
return mk_tooltip(mk_icon(icon_redo,
size=24,
**self._commands.redo()),
f"Redo '{command.attrs.name}'.")
else:
return mk_tooltip(mk_icon(icon_redo,
size=24,
can_select=False,
cls="mmt-btn-disabled"),
"Nothing to redo.")
def _can_undo(self):
return self.index >= 1
def _can_redo(self):
return self.index < len(self.history) - 1

View File

@@ -0,0 +1,24 @@
from dataclasses import dataclass
from typing import Callable
UNDO_REDO_INSTANCE_ID = "__UndoRedo__"
ROUTE_ROOT = "/undo"
class Routes:
Undo = "/undo"
Redo = "/redo"
@dataclass
class UndoRedoAttrs:
name: str
desc: str = None
update_tab: bool = True
on_undo: Callable = None
on_redo: Callable = None
def __post_init__(self):
if self.on_redo is None:
self.on_redo = self.on_undo

View File

@@ -4,10 +4,21 @@
using `_id={WORKFLOW_DESIGNER_INSTANCE_ID}{session['user_id']}{get_unique_id()}`
| Name | value |
|---------------|------------------|
| Canvas | `c_{self._id}` |
| Designer | `d_{self._id}` |
| Error Message | `err_{self._id}` |
| Properties | `p_{self._id}` |
| Splitter | `s_{self._id}` |
| Name | value |
|----------------------------------|--------------------------------|
| Canvas | `c_{self._id}` |
| Designer | `d_{self._id}` |
| Error Message | `err_{self._id}` |
| Properties | `p_{self._id}` |
| Properties Input Section | `pi_{self._id}` |
| Properties Output Section | `po_{self._id}` |
| Properties Properties Section | `pp_{self._id}` |
| Properties Properties drag top | `ppt_{self._id}` |
| Properties Properties drag left | `ppl_{self._id}` |
| Properties Properties drag right | `ppr_{self._id}` |
| Properties Properties content | `ppc_{self._id}` |
| Splitter                         | `s_{self._id}`                 |
| Top element | `t_{self._id}` |
| Form for properties | `f_{self._id}_{component_id}` |
| Form for output properties | `fo_{self._id}_{component_id}` |

View File

@@ -35,7 +35,7 @@ def post(session, _id: str, name: str, tab_boundaries: str):
@rt(Routes.AddComponent)
def post(session, _id: str, component_type: str, x: int, y: int):
def post(session, _id: str, component_type: str, x: float, y: float):
logger.debug(
f"Entering {Routes.AddComponent} with args {debug_session(session)}, {_id=}, {component_type=}, {x=}, {y=}")
instance = InstanceManager.get(session, _id)
@@ -43,7 +43,7 @@ def post(session, _id: str, component_type: str, x: int, y: int):
@rt(Routes.MoveComponent)
def post(session, _id: str, component_id: str, x: int, y: int):
def post(session, _id: str, component_id: str, x: float, y: float):
logger.debug(
f"Entering {Routes.MoveComponent} with args {debug_session(session)}, {_id=}, {component_id=}, {x=}, {y=}")
instance = InstanceManager.get(session, _id)
@@ -82,6 +82,14 @@ def post(session, _id: str, designer_height: int):
return instance.set_designer_height(designer_height)
@rt(Routes.UpdatePropertiesLayout)
def post(session, _id: str, input_width: int, properties_width: int, output_width: int):
logger.debug(
f"Entering {Routes.UpdatePropertiesLayout} with args {debug_session(session)}, {_id=}, {input_width=}, {properties_width=}, {output_width=}")
instance = InstanceManager.get(session, _id)
return instance.update_properties_layout(input_width, properties_width, output_width)
@rt(Routes.SelectComponent)
def post(session, _id: str, component_id: str):
logger.debug(
@@ -129,7 +137,20 @@ def post(session, _id: str, component_id: str, event_name: str, details: dict):
@rt(Routes.PlayWorkflow)
def post(session, _id: str, tab_boundaries: str):
logger.debug(
f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
logger.debug(f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.play_workflow(json.loads(tab_boundaries))
@rt(Routes.StopWorkflow)
def post(session, _id: str):
logger.debug(f"Entering {Routes.StopWorkflow} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.stop_workflow()
@rt(Routes.Refresh)
def post(session, _id: str):
logger.debug(f"Entering {Routes.Refresh} with args {debug_session(session)}, {_id=}")
instance = InstanceManager.get(session, _id)
return instance.refresh()

View File

@@ -47,16 +47,118 @@
.wkf-properties {
box-sizing: border-box;
position: relative;
font-family: Arial, sans-serif;
background-color: var(--color-base-100); /* bg-base-100 */
}
.wkf-properties-input, .wkf-properties-output {
display: inline-block;
vertical-align: top;
padding: 10px;
box-sizing: border-box;
font-family: Arial, sans-serif;
background-color: var(--color-base-100); /* bg-base-100 */
overflow: auto;
}
.wkf-properties-input {
border-width: 1px;
border-top-left-radius: 0.5rem; /* rounded on left side */
border-bottom-left-radius: 0.5rem;
border-top-right-radius: 0; /* not rounded on right side */
border-bottom-right-radius: 0;
}
.wkf-properties-output {
border-width: 1px;
border-top-right-radius: 0.5rem; /* rounded on right side */
border-bottom-right-radius: 0.5rem;
border-top-left-radius: 0; /* not rounded on left side */
border-bottom-left-radius: 0;
}
.wkf-properties-properties {
vertical-align: top;
position: relative;
box-sizing: border-box;
overflow: auto;
}
.wkf-properties-handle-left {
position: absolute;
left: 0;
top: 0;
width: 5px;
height: 100%;
cursor: ew-resize;
background-color: transparent;
}
.wkf-properties-handle-right {
position: absolute;
right: 0;
top: 0;
width: 5px;
height: 100%;
cursor: ew-resize;
background-color: transparent;
}
.wkf-properties-top {
display: flex;
justify-content: center;
align-items: center;
cursor: move;
padding: 4px;
}
.wkf-properties-handle-top {
background-image: radial-gradient(var(--color-splitter) 40%, transparent 0);
background-repeat: repeat;
background-size: 4px 4px;
cursor: move;
display: flex;
justify-content: center;
align-items: center;
height: 8px;
width: 20px;
position: relative;
top: 1px;
}
.wkf-properties-content {
display: flex;
flex-direction: column;
height: 100%; /* or inherit from a fixed-height parent */
}
.wkf-properties-content-header {
flex-shrink: 0; /* optional: prevent it from shrinking */
}
.wkf-properties-content-form {
display: flex;
flex-direction: column;
flex-grow: 1;
overflow: hidden; /* prevent double scrollbars if needed */
}
.wkf-canvas {
position: relative;
box-sizing: border-box;
background-image:
linear-gradient(rgba(0,0,0,.1) 1px, transparent 1px),
linear-gradient(90deg, rgba(0,0,0,.1) 1px, transparent 1px);
background-size: 20px 20px;
}
.wkf-canvas-error {
border: 3px solid var(--color-error);
}
.wkf-toolbox {
min-height: 230px;
width: 8rem; /* w-32 (32 * 0.25rem = 8rem) */
@@ -89,6 +191,11 @@
transition: none;
}
.wkf-workflow-component.error {
background: var(--color-error);
}
.wkf-component-content {
padding: 0.75rem; /* p-3 in Tailwind */
border-radius: 0.5rem; /* rounded-lg in Tailwind */
@@ -99,6 +206,12 @@
align-items: center; /* items-center in Tailwind */
}
.wkf-component-content.error {
background: var(--color-error);
}
.wkf-component-content.not-run {
}
.wkf-connection-line {
position: absolute;
@@ -177,5 +290,3 @@
.wkf-connection-path-arrowhead-selected {
    fill: #ef4444 !important;
}

View File

@@ -1,6 +1,7 @@
function bindWorkflowDesigner(elementId) {
bindWorkflowDesignerToolbox(elementId)
bindWorkflowDesignerSplitter(elementId)
bindWorkflowProperties(elementId)
}
function bindWorkflowDesignerToolbox(elementId) {
@@ -204,7 +205,7 @@ function bindWorkflowDesignerToolbox(elementId) {
// Also trigger server-side selection
utils.makeRequest('/workflows/select-component', {
component_id: designer.selectedComponent
}, `#p_${elementId}`, "outerHTML");
}, `#ppc_${elementId}`, "outerHTML");
},
// Deselect all components
@@ -612,3 +613,153 @@ function bindWorkflowDesignerSplitter(elementId) {
}
}
function bindWorkflowProperties(elementId) {
let isDragging = false;
let isResizing = false;
let startX = 0;
let startWidths = {};
let resizeType = '';
console.debug("Binding Properties component for "+ elementId)
properties_component = document.getElementById(`p_${elementId}`);
if (properties_component == null) {
console.error(`'Component ' p_${elementId}' is not found !' `)
return
}
const totalWidth = properties_component.getBoundingClientRect().width
console.debug("totalWidth", totalWidth)
    const minPropertiesWidth = 352; // this value avoids scroll bars
const inputSection = document.getElementById(`pi_${elementId}`);
const propertiesSection = document.getElementById(`pp_${elementId}`);
const outputSection = document.getElementById(`po_${elementId}`);
const dragHandle = document.getElementById(`ppt_${elementId}`);
const leftHandle = document.getElementById(`ppl_${elementId}`);
const rightHandle = document.getElementById(`ppr_${elementId}`);
// Drag and drop for moving properties section
dragHandle.addEventListener('mousedown', (e) => {
isDragging = true;
startX = e.clientX;
startWidths = {
input: parseInt(inputSection.style.width),
properties: parseInt(propertiesSection.style.width),
output: parseInt(outputSection.style.width)
};
e.preventDefault();
});
// Left resize handle
leftHandle.addEventListener('mousedown', (e) => {
isResizing = true;
resizeType = 'left';
startX = e.clientX;
startWidths = {
input: parseInt(inputSection.style.width),
properties: parseInt(propertiesSection.style.width),
output: parseInt(outputSection.style.width)
};
e.preventDefault();
});
// Right resize handle
rightHandle.addEventListener('mousedown', (e) => {
isResizing = true;
resizeType = 'right';
startX = e.clientX;
startWidths = {
input: parseInt(inputSection.style.width),
properties: parseInt(propertiesSection.style.width),
output: parseInt(outputSection.style.width)
};
e.preventDefault();
});
// Mouse move
document.addEventListener('mousemove', (e) => {
if (isDragging) {
const deltaX = e.clientX - startX;
let newInputWidth = startWidths.input + deltaX;
let newOutputWidth = startWidths.output - deltaX;
// Constraints
if (newInputWidth < 0) {
newInputWidth = 0;
newOutputWidth = totalWidth - startWidths.properties;
}
if (newOutputWidth < 0) {
newOutputWidth = 0;
newInputWidth = totalWidth - startWidths.properties;
}
inputSection.style.width = newInputWidth + 'px';
outputSection.style.width = newOutputWidth + 'px';
}
if (isResizing) {
const deltaX = e.clientX - startX;
let newInputWidth = startWidths.input;
let newPropertiesWidth = startWidths.properties;
let newOutputWidth = startWidths.output;
if (resizeType === 'left') {
newInputWidth = startWidths.input + deltaX;
newPropertiesWidth = startWidths.properties - deltaX;
if (newInputWidth < 0) {
newInputWidth = 0;
newPropertiesWidth = startWidths.input + startWidths.properties;
}
if (newPropertiesWidth < minPropertiesWidth) {
newPropertiesWidth = minPropertiesWidth;
newInputWidth = totalWidth - minPropertiesWidth - startWidths.output;
}
} else if (resizeType === 'right') {
newPropertiesWidth = startWidths.properties + deltaX;
newOutputWidth = startWidths.output - deltaX;
if (newOutputWidth < 0) {
newOutputWidth = 0;
newPropertiesWidth = startWidths.properties + startWidths.output;
}
if (newPropertiesWidth < minPropertiesWidth) {
newPropertiesWidth = minPropertiesWidth;
newOutputWidth = totalWidth - startWidths.input - minPropertiesWidth;
}
}
inputSection.style.width = newInputWidth + 'px';
propertiesSection.style.width = newPropertiesWidth + 'px';
outputSection.style.width = newOutputWidth + 'px';
}
});
// Mouse up
document.addEventListener('mouseup', () => {
if (isDragging || isResizing) {
// Send HTMX request with new dimensions
const currentWidths = {
input_width: parseInt(inputSection.style.width),
properties_width: parseInt(propertiesSection.style.width),
output_width: parseInt(outputSection.style.width)
};
try {
htmx.ajax('POST', '/workflows/update-properties-layout', {
target: `#${elementId}`,
headers: { "Content-Type": "application/x-www-form-urlencoded" },
swap: "outerHTML",
values: { _id: elementId, ...currentWidths }
});
} catch (error) {
console.error('HTMX request failed:', error);
throw error;
}
isDragging = false;
isResizing = false;
resizeType = '';
}
});
}

View File

@@ -23,3 +23,6 @@ icon_pause_circle = NotStr(
# fluent RecordStop20Regular
icon_stop_circle = NotStr(
"""<svg name="stop" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M10 3a7 7 0 1 0 0 14a7 7 0 0 0 0-14zm-8 7a8 8 0 1 1 16 0a8 8 0 0 1-16 0zm5-2a1 1 0 0 1 1-1h4a1 1 0 0 1 1 1v4a1 1 0 0 1-1 1H8a1 1 0 0 1-1-1V8z" fill="currentColor"></path></g></svg>""")
# fluent ArrowClockwise20Regular
icon_refresh = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M3.066 9.05a7 7 0 0 1 12.557-3.22l.126.17H12.5a.5.5 0 1 0 0 1h4a.5.5 0 0 0 .5-.5V2.502a.5.5 0 0 0-1 0v2.207a8 8 0 1 0 1.986 4.775a.5.5 0 0 0-.998.064A7 7 0 1 1 3.066 9.05z" fill="currentColor"></path></g></svg>""")

View File

@@ -37,7 +37,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
def select_processor(self, component_id: str):
return {
"hx_post": f"{ROUTE_ROOT}{Routes.SelectProcessor}",
"hx-target": f"#p_{self._id}",
"hx-target": f"#ppc_{self._id}",
"hx-swap": "outerHTML",
"hx-trigger": "change",
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
@@ -46,7 +46,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
def save_properties(self, component_id: str):
return {
"hx_post": f"{ROUTE_ROOT}{Routes.SaveProperties}",
"hx-target": f"#p_{self._id}",
"hx-target": f"#ppc_{self._id}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
}
@@ -54,7 +54,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
def cancel_properties(self, component_id: str):
return {
"hx_post": f"{ROUTE_ROOT}{Routes.CancelProperties}",
"hx-target": f"#p_{self._id}",
"hx-target": f"#ppc_{self._id}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
}
@@ -62,7 +62,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
def on_processor_details_event(self, component_id: str, event_name: str):
return {
"hx_post": f"{ROUTE_ROOT}{Routes.OnProcessorDetailsEvent}",
"hx-target": f"#p_{self._id}",
"hx-target": f"#ppc_{self._id}",
"hx-trigger": "change",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}", "event_name": "{event_name}"}}',
@@ -81,7 +81,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
"hx_post": f"{ROUTE_ROOT}{Routes.PauseWorkflow}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
"hx-vals": f'js:{{"_id": "{self._id}"}}',
}
def stop_workflow(self):
@@ -89,7 +89,14 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
"hx_post": f"{ROUTE_ROOT}{Routes.StopWorkflow}",
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
"hx-swap": "outerHTML",
"hx-vals": f'js:{{"_id": "{self._id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
"hx-vals": f'js:{{"_id": "{self._id}"}}',
}
def refresh(self):
return {
"hx_post": f"{ROUTE_ROOT}{Routes.Refresh}",
"hx-swap": "none",
"hx-vals": f'js:{{"_id": "{self._id}"}}',
}

View File

@@ -6,46 +6,24 @@ from fasthtml.xtend import Script
from assets.icons import icon_error
from components.BaseComponent import BaseComponent
from components.workflows.assets.icons import icon_play, icon_pause, icon_stop
from components.undo_redo.constants import UndoRedoAttrs
from components.workflows.assets.icons import icon_play, icon_pause, icon_stop, icon_refresh
from components.workflows.commands import WorkflowDesignerCommandManager
from components.workflows.components.WorkflowDesignerProperties import WorkflowDesignerProperties
from components.workflows.components.WorkflowPlayer import WorkflowPlayer
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes, COMPONENT_TYPES, \
PROCESSOR_TYPES
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, \
Connection, WorkflowsDesignerDbManager, WorkflowsPlayerSettings
Connection, WorkflowsDesignerDbManager, ComponentState, WorkflowsDesignerState
from components_helpers import apply_boundaries, mk_tooltip, mk_dialog_buttons, mk_icon
from core.instance_manager import InstanceManager
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
from core.utils import get_unique_id, make_safe_id
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
from utils.DbManagementHelper import DbManagementHelper
logger = logging.getLogger("WorkflowDesigner")
# Component templates
COMPONENT_TYPES = {
ProcessorTypes.Producer: {
"title": "Data Producer",
"description": "Generates or loads data",
"icon": "📊",
"color": "bg-green-100 border-green-300 text-neutral"
},
ProcessorTypes.Filter: {
"title": "Data Filter",
"description": "Filters and transforms data",
"icon": "🔍",
"color": "bg-blue-100 border-blue-300 text-neutral"
},
ProcessorTypes.Presenter: {
"title": "Data Presenter",
"description": "Displays or exports data",
"icon": "📋",
"color": "bg-purple-100 border-purple-300 text-neutral"
}
}
PROCESSOR_TYPES = {
ProcessorTypes.Producer: ["Repository", "Jira"],
ProcessorTypes.Filter: ["Default"],
ProcessorTypes.Presenter: ["Default"]}
class WorkflowDesigner(BaseComponent):
def __init__(self, session,
@@ -61,19 +39,55 @@ class WorkflowDesigner(BaseComponent):
self._key = key
self._designer_settings = designer_settings
self._db = WorkflowsDesignerDbManager(session, settings_manager)
self._state = self._db.load_state(key)
self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)
self._state: WorkflowsDesignerState = self._db.load_state(key)
self._boundaries = boundaries
self.commands = WorkflowDesignerCommandManager(self)
self.properties = WorkflowDesignerProperties(self._session, f"{self._id}", self)
workflow_name = self._designer_settings.workflow_name
self.player = InstanceManager.get(self._session,
WorkflowPlayer.create_component_id(self._session, workflow_name),
WorkflowPlayer,
settings_manager=self._settings_manager,
tabs_manager=self.tabs_manager,
designer=self,
boundaries=boundaries)
self._error_message = None
def set_boundaries(self, boundaries: dict):
self._boundaries = boundaries
def refresh_designer(self):
return self._mk_elements()
def get_boundaries(self):
return self._boundaries
def refresh_properties(self):
return self._mk_properties()
def get_state(self) -> WorkflowsDesignerState:
return self._state
def get_db(self):
return self._db
def get_key(self):
return self._key
def refresh_designer(self, oob=False):
if oob:
return self._mk_canvas(oob)
else:
return self._mk_elements()
def refresh_properties(self, oob=False):
return self._mk_properties(oob)
def refresh(self):
return self.__ft__(oob=True)
def refresh_state(self):
self._state = self._db.load_state(self._key)
self.properties.update_layout()
self.properties.update_component(self._state.selected_component_id)
return self.__ft__(oob=True)
def add_component(self, component_type, x, y):
self._state.component_counter += 1
@@ -90,31 +104,40 @@ class WorkflowDesigner(BaseComponent):
description=info["description"],
properties={"processor_name": PROCESSOR_TYPES[component_type][0]}
)
self._state.components[component_id] = component
self._db.save_state(self._key, self._state) # update db
return self.refresh_designer()
undo_redo_attrs = UndoRedoAttrs(f"Add Component '{component_type}'", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs) # update db
return self.refresh_designer(), self._undo_redo.refresh()
def move_component(self, component_id, x, y):
if component_id in self._state.components:
self._state.components[component_id].x = int(x)
self._state.components[component_id].y = int(y)
self._db.save_state(self._key, self._state) # update db
component = self._state.components[component_id]
self._state.selected_component_id = component_id
component.x = int(x)
component.y = int(y)
undo_redo_attrs = UndoRedoAttrs(f"Move Component '{component.title}'", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs) # update db
return self.refresh_designer()
return self.refresh_designer(), self.properties.refresh(mode="form", oob=True), self._undo_redo.refresh()
def delete_component(self, component_id):
# Remove component
if component_id in self._state.components:
component = self._state.components[component_id]
del self._state.components[component_id]
# Remove related connections
self._state.connections = [connection for connection in self._state.connections
if connection.from_id != component_id and connection.to_id != component_id]
# update db
undo_redo_attrs = UndoRedoAttrs(f"Remove Component '{component.title}'", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
# Remove related connections
self._state.connections = [connection for connection in self._state.connections
if connection.from_id != component_id and connection.to_id != component_id]
# update db
self._db.save_state(self._key, self._state)
return self.refresh_designer()
return self.refresh_designer(), self._undo_redo.refresh()
def add_connection(self, from_id, to_id):
# Check if connection already exists
@@ -127,9 +150,10 @@ class WorkflowDesigner(BaseComponent):
self._state.connections.append(connection)
# update db
self._db.save_state(self._key, self._state)
undo_redo_attrs = UndoRedoAttrs(f"Add Connection", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.refresh_designer()
return self.refresh_designer(), self._undo_redo.refresh()
def delete_connection(self, from_id, to_id):
for connection in self._state.connections:
@@ -137,65 +161,85 @@ class WorkflowDesigner(BaseComponent):
self._state.connections.remove(connection)
# update db
self._db.save_state(self._key, self._state)
undo_redo_attrs = UndoRedoAttrs(f"Delete Connection", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.refresh_designer()
return self.refresh_designer(), self._undo_redo.refresh()
def set_designer_height(self, height):
self._state.designer_height = height
self._db.save_state(self._key, self._state)
return self.__ft__() # refresh the whole component
undo_redo_attrs = UndoRedoAttrs(f"Resize Designer", on_undo=lambda: self.refresh_state())
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.__ft__(), self._undo_redo.refresh() # refresh the whole component
def update_properties_layout(self, input_width, properties_width, output_width):
self._state.properties_input_width = input_width
self._state.properties_properties_width = properties_width
self._state.properties_output_width = output_width
self.properties.update_layout()
undo_redo_attrs = UndoRedoAttrs(f"Resize Properties", on_undo=lambda: self.refresh_state())
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.__ft__(), self._undo_redo.refresh() # refresh the whole component
def select_component(self, component_id):
if component_id in self._state.components:
self._state.selected_component_id = component_id
self._db.save_state(self._key, self._state)
component = self._state.components[component_id]
undo_redo_attrs = UndoRedoAttrs(f"Select Component {component.title}", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.refresh_properties()
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
def save_properties(self, component_id: str, details: dict):
if component_id in self._state.components:
component = self._state.components[component_id]
component.properties = details
self._db.save_state(self._key, self._state)
component.properties |= details
undo_redo_attrs = UndoRedoAttrs(f"Set properties for {component.title}", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
logger.debug(f"Saved properties for component {component_id}: {details}")
return self.refresh_properties()
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
def cancel_properties(self, component_id: str):
if component_id in self._state.components:
logger.debug(f"Cancel saving properties for component {component_id}")
return self.refresh_properties()
return self.properties.refresh(mode="form")
def set_selected_processor(self, component_id: str, processor_name: str):
if component_id in self._state.components:
component = self._state.components[component_id]
component.properties = {"processor_name": processor_name}
self._db.save_state(self._key, self._state)
return self.refresh_properties()
undo_redo_attrs = UndoRedoAttrs(f"Set Processor for {component.title}", on_undo=self.refresh_state)
self._db.save_state(self._key, self._state, undo_redo_attrs)
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
def play_workflow(self, boundaries: dict):
if self._state.selected_component_id is None:
return self.error_message("No component selected")
self._error_message = None
workflow_name = self._designer_settings.workflow_name
player = InstanceManager.get(self._session,
WorkflowPlayer.create_component_id(self._session, workflow_name),
WorkflowPlayer,
settings_manager=self._settings_manager,
tabs_manager=self.tabs_manager,
player_settings=WorkflowsPlayerSettings(workflow_name,
list(self._state.components.values())),
boundaries=boundaries)
try:
player.run()
self.tabs_manager.add_tab(f"Workflow {workflow_name}", player, player.key)
return self.tabs_manager.refresh()
self.player.run()
if self.player.global_error:
# Show the error message in the same tab
self._error_message = self.player.global_error
except Exception as e:
return self.error_message(str(e))
else:
self.properties.set_entry_selector_data(self.player.nb_items)
# change the tab and display the results
self.player.set_boundaries(boundaries)
self.tabs_manager.add_tab(f"Workflow {self._designer_settings.workflow_name}", self.player, self.player.key)
return self.tabs_manager.refresh()
def stop_workflow(self):
self._error_message = None
self.player.stop()
self.properties.set_entry_selector_data(0)
return self.tabs_manager.refresh()
def on_processor_details_event(self, component_id: str, event_name: str, details: dict):
if component_id in self._state.components:
@@ -204,21 +248,30 @@ class WorkflowDesigner(BaseComponent):
component.properties["repository"] = details["repository"]
tables = DbManagementHelper.list_tables(self._session, details["repository"])
component.properties["table"] = tables[0] if len(tables) > 0 else None
elif event_name == "OnJiraRequestTypeChanged":
component.properties["request_type"] = details["request_type"]
return self.refresh_properties()
return self.properties.refresh(mode="form")
def error_message(self, message: str):
self._error_message = message
return self.tabs_manager.refresh()
def get_workflow_name(self):
return self._designer_settings.workflow_name
def __ft__(self):
def get_workflow_components(self):
return self._state.components.values()
def get_workflow_connections(self):
return self._state.connections
def __ft__(self, oob=False):
return Div(
H1(f"{self._designer_settings.workflow_name}", cls="text-xl font-bold"),
P("Drag components from the toolbox to the canvas to create your workflow.", cls="text-sm mb-6"),
# P("Drag components from the toolbox to the canvas to create your workflow.", cls="text-sm"),
Div(
self._mk_media(),
# self._mk_refresh_button(),
self._mk_error_message(),
cls="flex mb-2"
cls="flex mb-2",
id=f"t_{self._id}"
),
self._mk_designer(),
Div(cls="wkf-splitter", id=f"s_{self._id}"),
@@ -226,6 +279,7 @@ class WorkflowDesigner(BaseComponent):
Script(f"bindWorkflowDesigner('{self._id}');"),
**apply_boundaries(self._boundaries),
id=f"{self._id}",
hx_swap_oob='true' if oob else None,
)
def _mk_connection_svg(self, conn: Connection):
@@ -259,19 +313,67 @@ class WorkflowDesigner(BaseComponent):
</svg>
"""
def _mk_component(self, component: WorkflowComponent):
runtime_state = self.player.get_component_runtime_state(component.id)
info = COMPONENT_TYPES[component.type]
is_selected = self._state.selected_component_id == component.id
tooltip_content = None
tooltip_class = ""
if runtime_state.state == ComponentState.FAILURE:
state_class = 'error' # To be styled with a red highlight
tooltip_content = runtime_state.error_message
tooltip_class = "mmt-tooltip"
elif runtime_state.state == ComponentState.NOT_RUN:
state_class = 'not-run' # To be styled as greyed-out
else:
state_class = ''
return Div(
# Input connection point
Div(cls="wkf-connection-point wkf-input-point",
data_component_id=component.id,
data_point_type="input"),
# Component content
Div(
Span(info["icon"], cls="text-xl mb-1"),
H4(component.title, cls="font-semibold text-xs"),
cls=f"wkf-component-content {info['color']} {state_class}"
),
# Output connection point
Div(cls="wkf-connection-point wkf-output-point",
data_component_id=component.id,
data_point_type="output"),
cls=f"wkf-workflow-component w-32 {'selected' if is_selected else ''} {tooltip_class}",
style=f"left: {component.x}px; top: {component.y}px;",
data_component_id=component.id,
data_tooltip=tooltip_content,
draggable="true"
)
def _mk_elements(self):
if len(self._state.components) == 0:
return Div("Drag components from the toolbox to the canvas to create your workflow.",
cls="flex items-center justify-center h-full w-full"
)
return Div(
# Render connections
*[NotStr(self._mk_connection_svg(conn)) for conn in self._state.connections],
# Render components
*[self._mk_workflow_component(comp) for comp in self._state.components.values()],
*[self._mk_component(comp) for comp in self._state.components.values()],
)
def _mk_canvas(self, oob=False):
return Div(
self._mk_elements(),
cls="wkf-canvas flex-1 rounded-lg border flex-1",
cls=f"wkf-canvas flex-1 rounded-lg border flex-1 {'wkf-canvas-error' if self._error_message else ''}",
id=f"c_{self._id}",
hx_swap_oob='true' if oob else None,
),
@@ -291,7 +393,7 @@ class WorkflowDesigner(BaseComponent):
self._mk_toolbox(), # (Left side)
self._mk_canvas(), # (Right side)
cls="wkf-designer flex gap-4",
cls="wkf-designer flex gap-1",
id=f"d_{self._id}",
style=f"height:{self._state.designer_height}px;"
)
@@ -299,11 +401,14 @@ class WorkflowDesigner(BaseComponent):
def _mk_media(self):
return Div(
mk_icon(icon_play, cls="mr-1", **self.commands.play_workflow()),
mk_icon(icon_pause, cls="mr-1", **self.commands.play_workflow()),
mk_icon(icon_stop, cls="mr-1", **self.commands.play_workflow()),
mk_icon(icon_pause, cls="mr-1", **self.commands.pause_workflow()),
mk_icon(icon_stop, cls="mr-1", **self.commands.stop_workflow()),
cls=f"media-controls flex m-2"
)
def _mk_refresh_button(self):
return mk_icon(icon_refresh, **self.commands.refresh())
def _mk_error_message(self):
if not self._error_message:
return Div()
@@ -328,6 +433,17 @@ class WorkflowDesigner(BaseComponent):
return Div('Not defined yet !')
def _mk_properties_output(self, component):
return Div(
"Output name",
Input(type="input",
name="output_name",
placeholder="data",
value=component.properties.get("output_name", None),
cls="input w-xs"),
cls="join"
)
def _mk_properties_details(self, component_id, allow_component_selection=False):
def _mk_header():
return Div(
@@ -361,36 +477,91 @@ class WorkflowDesigner(BaseComponent):
return Div(
Form(
_mk_header(),
_mk_select(),
self._mk_processor_properties(component, selected_processor_name),
Div(
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Properties", checked="checked"),
Div(
_mk_select(),
self._mk_processor_properties(component, selected_processor_name),
cls="tab-content"
),
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Inputs"),
Div(
"Inputs",
cls="tab-content"
),
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Output"),
Div(
self._mk_properties_output(component),
cls="tab-content"
),
cls="tabs tabs-border"
),
mk_dialog_buttons(cls="mt-4",
on_ok=self.commands.save_properties(component_id),
on_cancel=self.commands.cancel_properties(component_id)),
cls="font-mono text-sm",
id=f"f_{self._id}_{component_id}",
),
Script(f"bindFormData('f_{self._id}_{component_id}');")
)
def _mk_properties(self):
return Div(
self._mk_properties_details(self._state.selected_component_id),
cls="p-2 bg-base-100 rounded-lg border",
style=f"height:{self._get_properties_height()}px;",
id=f"p_{self._id}",
)
def _mk_properties(self, oob=False):
return self.properties.__ft__(oob)
@staticmethod
def _mk_jira_processor_details(component):
def _mk_jira_processor_details(self, component):
def _mk_option(name):
return Option(name.name,
value=name.value,
selected="selected" if name.value == request_type else None)
def _mk_input_group():
if request_type == JiraRequestTypes.Search.value:
return Div(
Input(type="text",
name="request",
value=component.properties.get("request", ""),
placeholder="Enter JQL",
cls="input w-full"),
P("Write your jql code"),
)
elif request_type == JiraRequestTypes.Comments.value:
return Div(
Input(type="text",
name="request",
value=component.properties.get("request", ""),
placeholder="Issue id",
cls="input w-full"),
P("Put the issue id here"),
)
def _mk_extra_parameters():
if request_type == JiraRequestTypes.Search.value:
return Input(type="text",
name="fields",
value=component.properties.get("fields", DEFAULT_SEARCH_FIELDS),
placeholder="default fields",
cls="input w-full ml-2")
else:
return None
request_type = component.properties.get("request_type", JiraRequestTypes.Search.value)
return Div(
Fieldset(
Legend("JQL", cls="fieldset-legend"),
Input(type="text",
name="jira_jql",
value=component.properties.get("jira_jql", ""),
placeholder="Enter JQL",
cls="input w-full"),
P("Write your jsl code"),
Div(
Select(
*[_mk_option(enum) for enum in JiraRequestTypes],
cls="select w-xs",
name="request_type",
**self.commands.on_processor_details_event(component.id, "OnJiraRequestTypeChanged"),
),
_mk_extra_parameters(),
cls="flex"),
_mk_input_group(),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
),
)
@@ -470,7 +641,8 @@ class WorkflowDesigner(BaseComponent):
value=component.properties.get("columns", ""),
placeholder="Columns to display, separated by comma",
cls="input w-full"),
P("Comma separated list of columns to display. Use * to display all columns, source=dest to rename columns."),
P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
)
)
@@ -501,30 +673,3 @@ class WorkflowDesigner(BaseComponent):
draggable="true",
data_type=component_type
)
@staticmethod
def _mk_workflow_component(component: WorkflowComponent):
info = COMPONENT_TYPES[component.type]
return Div(
# Input connection point
Div(cls="wkf-connection-point wkf-input-point",
data_component_id=component.id,
data_point_type="input"),
# Component content
Div(
Span(info["icon"], cls="text-xl mb-1"),
H4(component.title, cls="font-semibold text-xs"),
cls=f"wkf-component-content {info['color']}"
),
# Output connection point
Div(cls="wkf-connection-point wkf-output-point",
data_component_id=component.id,
data_point_type="output"),
cls="wkf-workflow-component w-32",
style=f"left: {component.x}px; top: {component.y}px;",
data_component_id=component.id,
draggable="true"
)

View File

@@ -0,0 +1,384 @@
from fasthtml.common import *
from components.BaseComponent import BaseComponent
from components.entryselector.components.EntrySelector import EntrySelector
from components.jsonviewer.components.JsonViewer import JsonViewer
from components.workflows.constants import COMPONENT_TYPES, PROCESSOR_TYPES
from components_helpers import mk_dialog_buttons
from core.instance_manager import InstanceManager
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
from utils.DbManagementHelper import DbManagementHelper
@dataclass
class DesignerLayout:
input_width: int
properties_width: int
output_width: int
class WorkflowDesignerProperties(BaseComponent):
def __init__(self, session, instance_id, owner):
super().__init__(session, instance_id)
self._owner = owner
self._boundaries = self._owner.get_boundaries()
self._commands = self._owner.commands
self.layout = None
self._component = None
self.update_layout()
self.update_component(self._owner.get_state().selected_component_id)
self.entry_selector: EntrySelector = InstanceManager.new(self._session,
EntrySelector,
owner=self,
hooks={
"on_entry_selected": self.on_entry_selector_changed})
self._input_jsonviewer: JsonViewer = InstanceManager.new(self._session,
JsonViewer)
self._output_jsonviewer: JsonViewer = InstanceManager.new(self._session,
JsonViewer)
def set_entry_selector_data(self, data):
self.entry_selector.set_data(data)
def update_layout(self):
if self._owner.get_state().properties_input_width is None:
input_width = self._boundaries["width"] // 3
properties_width = self._boundaries["width"] // 3
output_width = self._boundaries["width"] - input_width - properties_width
else:
input_width = self._owner.get_state().properties_input_width
properties_width = self._owner.get_state().properties_properties_width
output_width = self._owner.get_state().properties_output_width
self.layout = DesignerLayout(
input_width=input_width,
properties_width=properties_width,
output_width=output_width
)
def update_component(self, component_id):
if component_id is None or component_id not in self._owner.get_state().components:
self._component = None
else:
self._component = self._owner.get_state().components[component_id]
def refresh(self, mode="all", oob=False):
self.update_component(self._owner.get_state().selected_component_id)
if mode == "form":
return self._mk_content(oob=oob)
return self.__ft__(oob=oob)
def on_entry_selector_changed(self, entry):
entry = int(entry)
input_data, output_data = None, None
selected_component_id = self._owner.get_state().selected_component_id
if selected_component_id is not None:
runtime_state = self._owner.player.runtime_states.get(selected_component_id, None)
if runtime_state is not None:
input_content = runtime_state.input[entry] if len(runtime_state.input) > entry else None
output_content = runtime_state.output[entry] if len(runtime_state.output) > entry else None
if input_content is not None:
self._input_jsonviewer.set_data(input_content.item.as_dict())
input_data = self._input_jsonviewer
if output_content is not None:
self._output_jsonviewer.set_data(output_content.item.as_dict())
output_data = self._output_jsonviewer
return (self._mk_input(content=input_data, oob=True),
self._mk_output(content=output_data, oob=True))
def _mk_layout(self):
return Div(
self.entry_selector,
Div(
self._mk_input(),
self._mk_properties(),
self._mk_output(),
cls="flex",
style="height: 100%; width: 100%; flex: 1;"
)
)
def _mk_input(self, content=None, oob=False):
return Div(
content,
id=f"pi_{self._id}",
style=f"width: {self.layout.input_width}px;",
cls="wkf-properties-input",
hx_swap_oob=f'true' if oob else None,
)
def _mk_output(self, content=None, oob=False):
return Div(
content,
id=f"po_{self._id}",
style=f"width: {self.layout.output_width}px;",
cls="wkf-properties-output",
hx_swap_oob=f'true' if oob else None,
)
def _mk_properties(self):
return Div(
# Drag handle (20px height)
Div(
A(cls="wkf-properties-handle-top"),
cls="wkf-properties-top",
id=f"ppt_{self._id}",
),
# Properties content
self._mk_content(),
# Left resize handle
Div(
id=f"ppl_{self._id}",
cls="wkf-properties-handle-left"
),
# Right resize handle
Div(
id=f"ppr_{self._id}",
cls="wkf-properties-handle-right"
),
id=f"pp_{self._id}",
style=f"width: {self.layout.properties_width}px; height: 100%;",
cls="wkf-properties-properties flex flex-col",
)
def _mk_content(self, oob=False):
return Div(
self._header(),
self._form(),
cls="wkf-properties-content",
id=f"ppc_{self._id}",
hx_swap_oob=f'true' if oob else None,
)
def _header(self):
if self._component is None:
return None
icon = COMPONENT_TYPES[self._component.type]["icon"]
color = COMPONENT_TYPES[self._component.type]["color"]
return Div(
Div(
Span(icon),
H4(self._component.title, cls="font-semibold text-xs"),
cls=f"rounded-lg border-2 {color} flex text-center px-2"
),
Div(self._component.id, cls="ml-2"),
cls="flex wkf-properties-content-header",
)
def _form(self):
if self._component is None:
return None
component_id = self._component.id
return Form(
Div(
self._mk_select_processor(),
self._content_details(),
style="flex-grow: 1; overflow-y: auto;"
),
mk_dialog_buttons(cls="pb-2",
on_ok=self._commands.save_properties(component_id),
on_cancel=self._commands.cancel_properties(component_id)
),
id=f"ppf_{self._id}",
cls="wkf-properties-content-form",
)
def _mk_select_processor(self):
selected_processor_name = self._component.properties.get("processor_name", None)
return Select(
*[Option(processor_name, selected="selected" if processor_name == selected_processor_name else None)
for processor_name in PROCESSOR_TYPES[self._component.type]],
cls="select select-sm m-2",
id="processor_name",
name="processor_name",
**self._commands.select_processor(self._component.id)
)
def _content_details(self):
component_type = self._component.type
processor_name = self._component.properties.get("processor_name", None)
key = f"_mk_details_{component_type}_{processor_name}".lower()
if hasattr(self, key):
return getattr(self, key)()
else:
return Div(f"Component '{key}' not found")
def _mk_details_producer_jira(self):
def _mk_option(name):
"""
Generic helper to create options
:param name:
:return:
"""
return Option(name.name,
value=name.value,
selected="selected" if name.value == request_type else None)
def _mk_input_group():
if request_type == JiraRequestTypes.Search.value or request_type == "issues": # remove issues at some point
return [
Div(
Input(type="text",
name=f"{request_type}_fields",
value=self._component.properties.get(f"{request_type}_fields", DEFAULT_SEARCH_FIELDS),
placeholder="default fields",
cls="input w-full"),
P("Jira fields to retrieve"),
),
Div(
Input(type="text",
name=f"{request_type}_request",
value=self._component.properties.get(f"{request_type}_request", ""),
placeholder="Enter JQL",
cls="input w-full"),
P("Write your jql code"),
)
]
elif request_type in (JiraRequestTypes.Issue.value, JiraRequestTypes.Comments.value):
return [
Div(
Input(type="text",
name=f"{request_type}_request",
value=self._component.properties.get(f"{request_type}_request", ""),
placeholder="Issue id",
cls="input w-full"),
P("Put the issue id here"),
)
]
elif request_type == JiraRequestTypes.Versions.value:
return [
Div(
Input(type="text",
name=f"{request_type}_request",
value=self._component.properties.get(f"{request_type}_request", ""),
placeholder="Project key",
cls="input w-full"),
P("Enter the project key"),
)
]
else:
return [Div(f"** Not Implemented ** ('{request_type}' not supported yet)")]
request_type = self._component.properties.get("request_type", JiraRequestTypes.Search.value)
return Div(
Fieldset(
Legend("Jira", cls="fieldset-legend"),
Div(
Select(
*[_mk_option(enum) for enum in JiraRequestTypes],
cls="select w-xs",
name="request_type",
**self._commands.on_processor_details_event(self._component.id, "OnJiraRequestTypeChanged"),
),
P("Jira ressource type"),
cls="mb-4"
),
*_mk_input_group(),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
),
)
def _mk_details_producer_repository(self):
selected_repo = self._component.properties.get("repository", None)
selected_table = self._component.properties.get("table", None)
def _mk_repositories_options():
repositories = DbManagementHelper.list_repositories(self._session)
if len(repositories) == 0:
return [Option("No repository available", disabled=True)]
return ([Option("Choose a repository", disabled=True, selected="selected" if selected_repo is None else None)] +
[Option(repo.name, selected="selected" if repo.name == selected_repo else None)
for repo in DbManagementHelper.list_repositories(self._session)])
def _mk_tables_options():
if selected_repo is None:
return [Option("No repository selected", disabled=True, selected="selected")]
tables = DbManagementHelper.list_tables(self._session, selected_repo)
if len(tables) == 0:
return [Option("No table available", disabled=True)]
return ([Option("Choose a table", disabled=True, selected="selected" if selected_table is None else None)] +
[Option(table, selected="selected" if table == selected_table else None)
for table in DbManagementHelper.list_tables(self._session, selected_repo)])
return Div(
Fieldset(
Legend("Repository", cls="fieldset-legend"),
Div(
Select(
*_mk_repositories_options(),
cls="select w-64",
id=f"repository_{self._id}",
name="repository",
**self._commands.on_processor_details_event(self._component.id, "OnRepositoryChanged"),
),
P("Select the repository"),
),
Div(
Select(
*_mk_tables_options(),
cls="select w-64",
id=f"table_{self._id}",
name="table",
),
P("Select the table"),
),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
)
)
def _mk_details_filter_default(self):
return Div(
Fieldset(
Legend("Filter", cls="fieldset-legend"),
Input(type="text",
name="filter",
value=self._component.properties.get("filter", ""),
placeholder="Filter expression",
cls="input w-full"),
P("Filter expression"),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
)
)
def _mk_details_presenter_default(self):
return Div(
Fieldset(
Legend("Presenter", cls="fieldset-legend"),
Input(type="text",
name="columns",
value=self._component.properties.get("columns", ""),
placeholder="Columns to display, separated by comma",
cls="input w-full"),
P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
)
)
def __ft__(self, oob=False):
# return self.render()
return Div(
self._mk_layout(),
style=f"height: {self._get_height()}px;",
id=f"p_{self._id}",
hx_swap_oob=f'innerHTML' if oob else None,
cls="wkf-properties"
)
def _get_height(self):
return self._boundaries["height"] - self._owner.get_state().designer_height - 86

View File

@@ -1,3 +1,6 @@
from collections import deque
from dataclasses import dataclass
import pandas as pd
from fasthtml.components import *
@@ -6,10 +9,12 @@ from components.datagrid_new.components.DataGrid import DataGrid
from components.datagrid_new.settings import DataGridSettings
from components.workflows.commands import WorkflowPlayerCommandManager
from components.workflows.constants import WORKFLOW_PLAYER_INSTANCE_ID, ProcessorTypes
from components.workflows.db_management import WorkflowsPlayerSettings
from components.workflows.db_management import WorkflowComponentRuntimeState, \
WorkflowComponent, ComponentState
from core.instance_manager import InstanceManager
from core.utils import get_unique_id, make_safe_id
from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataPresenter, DefaultDataFilter
from workflow.DefaultDataPresenter import DefaultDataPresenter
from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataFilter, JiraDataProducer
grid_settings = DataGridSettings(
header_visible=True,
@@ -19,18 +24,24 @@ grid_settings = DataGridSettings(
open_settings_visible=False)
@dataclass
class WorkflowsPlayerError(Exception):
component_id: str
error: Exception
class WorkflowPlayer(BaseComponent):
def __init__(self, session,
_id=None,
settings_manager=None,
tabs_manager=None,
player_settings: WorkflowsPlayerSettings = None,
designer=None,
boundaries: dict = None):
super().__init__(session, _id)
self._settings_manager = settings_manager
self.tabs_manager = tabs_manager
self.key = f"__WorkflowPlayer_{player_settings.workflow_name}"
self._player_settings = player_settings
self._designer = designer
self.key = f"__WorkflowPlayer_{designer.get_workflow_name()}"
self._boundaries = boundaries
self.commands = WorkflowPlayerCommandManager(self)
self._datagrid = InstanceManager.get(self._session,
@@ -39,31 +50,177 @@ class WorkflowPlayer(BaseComponent):
key=self.key,
grid_settings=grid_settings,
boundaries=boundaries)
self.runtime_states = {}
self.global_error = None
self.has_error = False
self.nb_items = 0
def set_boundaries(self, boundaries: dict):
self._datagrid.set_boundaries(boundaries)
def get_component_runtime_state(self, component_id: str):
# return a default value if the player hasn't been played yet
return self.runtime_states.get(component_id, WorkflowComponentRuntimeState(component_id))
def run(self):
engine = WorkflowEngine()
for component in self._player_settings.components:
if component.type == ProcessorTypes.Producer and component.properties["processor_name"] == "Repository":
engine.add_processor(
TableDataProducer(self._session, self._settings_manager, component.properties["repository"],
component.properties["table"]))
elif component.type == ProcessorTypes.Filter and component.properties["processor_name"] == "Default":
engine.add_processor(DefaultDataFilter(component.properties["filter"]))
elif component.type == ProcessorTypes.Presenter and component.properties["processor_name"] == "Default":
engine.add_processor(DefaultDataPresenter(component.properties["columns"]))
# at least one connection is required to play
if len(self._designer.get_workflow_connections()) == 0:
self.global_error = "No connections defined."
return
self._init_state(ComponentState.NOT_RUN)
try:
sorted_components = self._get_sorted_components()
engine = self._get_engine(sorted_components)
except ValueError as e:
# Handle workflow structure errors (e.g., cycles)
self.has_error = True
self.global_error = f"Workflow configuration error: {e}"
return
except WorkflowsPlayerError as ex:
self.has_error = True
self.global_error = self._get_global_error_as_str(ex, "Failed to init ")
if ex.component_id in self.runtime_states:
self.runtime_states[ex.component_id].state = ComponentState.FAILURE
self.runtime_states[ex.component_id].error_message = str(ex.error)
return
res = engine.run_to_list()
if engine.has_error and not engine.errors:
self.has_error = True
self.global_error = engine.global_error
else: # loop through the components and update the runtime states
self.nb_items = engine.nb_items
for component in sorted_components:
runtime_state = self.runtime_states.get(component.id)
if component.id not in engine.errors:
runtime_state.state = ComponentState.SUCCESS
runtime_state.input = engine.debug[component.id]["input"]
runtime_state.output = engine.debug[component.id]["output"]
continue
# the component failed
error = engine.errors[component.id]
runtime_state.state = ComponentState.FAILURE
runtime_state.error_message = str(error)
self.global_error = self._get_global_error_as_str(error, "Error in ") # update global error as well
self.has_error = True
break # the remaining components will remain as NOT_RUN
data = [row.as_dict() for row in res]
df = pd.DataFrame(data)
self._datagrid.init_from_dataframe(df)
def stop(self):
self._init_state()
def get_dataframe(self):
return self._datagrid.get_dataframe()
def __ft__(self):
return Div(
self._datagrid,
id=self._id,
)
def _get_sorted_components(self) -> list[WorkflowComponent]:
"""
Sorts the workflow components based on their connections using topological sort.
- A connection from component A to B means A must come before B.
- Raises a ValueError if a cycle is detected.
- Raises a ValueError if a connection references a non-existent component.
- Ignores components that are not part of any connection.
:return: A list of sorted WorkflowComponent objects.
"""
components_by_id = {c.id: c for c in self._designer.get_workflow_components()}
# Get all component IDs involved in connections
involved_ids = set()
for conn in self._designer.get_workflow_connections():
involved_ids.add(conn.from_id)
involved_ids.add(conn.to_id)
# Check if all involved components exist
for component_id in involved_ids:
if component_id not in components_by_id:
raise ValueError(f"Component with ID '{component_id}' referenced in connections but does not exist.")
# Build the graph (adjacency list and in-degrees) for involved components
adj = {cid: [] for cid in involved_ids}
in_degree = {cid: 0 for cid in involved_ids}
for conn in self._designer.get_workflow_connections():
# from_id -> to_id
adj[conn.from_id].append(conn.to_id)
in_degree[conn.to_id] += 1
# Find all sources (nodes with in-degree 0)
queue = deque([cid for cid in involved_ids if in_degree[cid] == 0])
sorted_order = []
while queue:
u = queue.popleft()
sorted_order.append(u)
for v in adj.get(u, []):
in_degree[v] -= 1
if in_degree[v] == 0:
queue.append(v)
# Check for cycles
if len(sorted_order) != len(involved_ids):
raise ValueError("A cycle was detected in the workflow connections.")
# Return sorted components
return [components_by_id[cid] for cid in sorted_order]
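To make the ordering contract above concrete, here is a self-contained sketch of the same Kahn-style topological sort on plain strings (the component ids are made up); a back edge would leave some nodes unprocessed and trigger the cycle error.

from collections import deque

def topo_order(nodes, edges):
    # Build the adjacency list and in-degree table, as in _get_sorted_components.
    adj = {n: [] for n in nodes}
    in_degree = {n: 0 for n in nodes}
    for src, dst in edges:
        adj[src].append(dst)
        in_degree[dst] += 1
    queue = deque(n for n in nodes if in_degree[n] == 0)  # sources first
    order = []
    while queue:
        u = queue.popleft()
        order.append(u)
        for v in adj[u]:
            in_degree[v] -= 1
            if in_degree[v] == 0:
                queue.append(v)
    if len(order) != len(nodes):
        raise ValueError("A cycle was detected in the workflow connections.")
    return order

print(topo_order(["presenter", "filter", "producer"],
                 [("producer", "filter"), ("filter", "presenter")]))
# -> ['producer', 'filter', 'presenter']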
def _get_engine(self, sorted_components) -> WorkflowEngine:
# build the engine from the components, already ordered according to the connection definitions
engine = WorkflowEngine()
for component in sorted_components:
key = (component.type, component.properties["processor_name"])
try:
if key == (ProcessorTypes.Producer, "Repository"):
engine.add_processor(
TableDataProducer(self._session,
self._settings_manager,
component.id,
component.properties["repository"],
component.properties["table"]))
elif key == (ProcessorTypes.Producer, "Jira"):
request_type = component.properties["request_type"]
engine.add_processor(
JiraDataProducer(self._session,
self._settings_manager,
component.id,
component.properties["request_type"],
component.properties[f"{request_type}_request"],
component.properties.get(f"{request_type}_fields", None)))
elif key == (ProcessorTypes.Filter, "Default"):
engine.add_processor(DefaultDataFilter(component.id, component.properties["filter"]))
elif key == (ProcessorTypes.Presenter, "Default"):
engine.add_processor(DefaultDataPresenter(component.id, component.properties["columns"]))
else:
raise ValueError(
f"Unsupported processor : type={component.type}, name={component.properties['processor_name']}")
except Exception as e:
raise WorkflowsPlayerError(component.id, e)
return engine
def _init_state(self, state: ComponentState = ComponentState.SUCCESS):
self.global_error = None
self.has_error = False
self.runtime_states = {component.id: WorkflowComponentRuntimeState(component.id, state)
for component in self._designer.get_workflow_components()}
@staticmethod
def create_component_id(session, suffix=None):
prefix = f"{WORKFLOW_PLAYER_INSTANCE_ID}{session['user_id']}"
@@ -71,3 +228,10 @@ class WorkflowPlayer(BaseComponent):
suffix = get_unique_id()
return make_safe_id(f"{prefix}{suffix}")
@staticmethod
def _get_global_error_as_str(error, prefix=""):
if hasattr(error, "component_id"):
return f"{prefix}component '{error.component_id}': {error.error}"
else:
return str(error)

View File

@@ -6,11 +6,39 @@ WORKFLOW_DESIGNER_DB_ENTRY = "WorkflowDesigner"
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY = "Settings"
WORKFLOW_DESIGNER_DB_STATE_ENTRY = "State"
class ProcessorTypes:
Producer = "producer"
Filter = "filter"
Presenter = "presenter"
COMPONENT_TYPES = {
ProcessorTypes.Producer: {
"title": "Data Producer",
"description": "Generates or loads data",
"icon": "📊",
"color": "bg-green-100 border-green-300 text-neutral"
},
ProcessorTypes.Filter: {
"title": "Data Filter",
"description": "Filters and transforms data",
"icon": "🔍",
"color": "bg-blue-100 border-blue-300 text-neutral"
},
ProcessorTypes.Presenter: {
"title": "Data Presenter",
"description": "Displays or exports data",
"icon": "📋",
"color": "bg-purple-100 border-purple-300 text-neutral"
}
}
PROCESSOR_TYPES = {
ProcessorTypes.Producer: ["Repository", "Jira"],
ProcessorTypes.Filter: ["Default"],
ProcessorTypes.Presenter: ["Default"]}
ROUTE_ROOT = "/workflows"
@@ -25,6 +53,7 @@ class Routes:
AddConnection = "/add-connection"
DeleteConnection = "/delete-connection"
ResizeDesigner = "/resize-designer"
UpdatePropertiesLayout = "/update-properties-layout"
SaveProperties = "/save-properties"
CancelProperties = "/cancel-properties"
SelectProcessor = "/select-processor"
@@ -32,4 +61,4 @@ class Routes:
PlayWorkflow = "/play-workflow"
PauseWorkflow = "/pause-workflow"
StopWorkflow = "/stop-workflow"
Refresh = "/refresh"

View File

@@ -1,13 +1,26 @@
import enum
import logging
from dataclasses import dataclass, field
from components.undo_redo.constants import UndoRedoAttrs
from components.workflows.constants import WORKFLOWS_DB_ENTRY, WORKFLOW_DESIGNER_DB_ENTRY, \
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY, WORKFLOW_DESIGNER_DB_STATE_ENTRY
from core.settings_management import SettingsManager
from core.utils import make_safe_id
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
logger = logging.getLogger("WorkflowsSettings")
class ComponentState(enum.Enum):
"""
Represents the execution state of a workflow component.
"""
SUCCESS = "success"
FAILURE = "failure"
NOT_RUN = "not_run"
# Data structures
@dataclass
class WorkflowComponent:
@@ -27,6 +40,18 @@ class Connection:
to_id: str
@dataclass
class WorkflowComponentRuntimeState:
"""
Represents the runtime state of a single workflow component.
"""
id: str
state: ComponentState = ComponentState.SUCCESS
error_message: str | None = None
input: list = None
output: list = None
@dataclass
class WorkflowsDesignerSettings:
workflow_name: str = "No Name"
@@ -36,15 +61,12 @@ class WorkflowsDesignerSettings:
class WorkflowsDesignerState:
components: dict[str, WorkflowComponent] = field(default_factory=dict)
connections: list[Connection] = field(default_factory=list)
component_counter = 0
designer_height = 230
selected_component_id = None
@dataclass
class WorkflowsPlayerSettings:
workflow_name: str = "No Name"
components: list[WorkflowComponent] = None
component_counter: int = 0
designer_height: int = 230
properties_input_width: int = None
properties_properties_width: int = None
properties_output_width: int = None
selected_component_id: str | None = None
@dataclass
@@ -143,10 +165,11 @@ class WorkflowsDesignerDbManager:
def __init__(self, session: dict, settings_manager: SettingsManager):
self._session = session
self._settings_manager = settings_manager
self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)
@staticmethod
def _get_db_entry(key):
return f"{WORKFLOW_DESIGNER_DB_ENTRY}_{key}"
return make_safe_id(f"{WORKFLOW_DESIGNER_DB_ENTRY}_{key}")
def save_settings(self, key: str, settings: WorkflowsDesignerSettings):
self._settings_manager.put(self._session,
@@ -154,11 +177,17 @@ class WorkflowsDesignerDbManager:
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY,
settings)
def save_state(self, key: str, state: WorkflowsDesignerState):
def save_state(self, key: str, state: WorkflowsDesignerState, undo_redo_attrs: UndoRedoAttrs = None):
db_entry = self._get_db_entry(key)
self._settings_manager.put(self._session,
self._get_db_entry(key),
db_entry,
WORKFLOW_DESIGNER_DB_STATE_ENTRY,
state)
if undo_redo_attrs is not None:
self._undo_redo.snapshot(undo_redo_attrs,
db_entry,
WORKFLOW_DESIGNER_DB_STATE_ENTRY)
def save_all(self, key: str, settings: WorkflowsDesignerSettings = None, state: WorkflowsDesignerState = None):
items = {}

View File

@@ -3,9 +3,10 @@ from fasthtml.components import *
from core.utils import merge_classes
def mk_icon(icon, size=20, can_select=True, cls='', tooltip=None, **kwargs):
def mk_icon(icon, size=20, can_select=True, can_hover=False, cls='', tooltip=None, **kwargs):
merged_cls = merge_classes(f"icon-{size}",
'icon-btn' if can_select else '',
'mmt-btn' if can_hover else '',
cls,
kwargs)
return mk_tooltip(icon, tooltip, cls=merged_cls, **kwargs) if tooltip else Div(icon, cls=merged_cls, **kwargs)

View File

@@ -48,6 +48,9 @@ class Expando:
return self._props.copy()
def to_dict(self, mappings: dict) -> dict:
"""
Return the information as a dictionary, with the given mappings
"""
return {prop_name: self.get(path) for path, prop_name in mappings.items() if prop_name is not None}
def __hasattr__(self, item):

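A small illustration of the new to_dict helper above, assuming Expando wraps a plain dict and that get() resolves dotted paths the way the Jira extract() helper later in this changeset relies on; the payload and mapping below are made up.

from core.Expando import Expando

issue = Expando({"key": "ABC-1", "fields": {"summary": "Fix login"}})
row = issue.to_dict({
    "key": "issue_key",           # rename "key" to "issue_key"
    "fields.summary": "summary",  # pick a nested field (dotted path, assumed supported by get())
    "fields.assignee": None,      # None target: the entry is skipped
})
# row == {"issue_key": "ABC-1", "summary": "Fix login"}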
View File

@@ -271,6 +271,42 @@ class DbEngine:
except KeyError:
raise DbException(f"Key '{key}' not found in entry '{entry}'")
def history(self, user_id, entry, digest=None, max_items=1000):
"""
Gives the current digest and all its ancestors
:param user_id:
:param entry:
:param digest:
:param max_items:
:return:
"""
with self.lock:
logger.info(f"History for {user_id=}, {entry=}, {digest=}")
digest_to_use = digest or self._get_entry_digest(user_id, entry)
logger.debug(f"Using digest {digest_to_use}.")
count = 0
history = []
while True:
if count >= max_items or digest_to_use is None:
break
history.append(digest_to_use)
count += 1
try:
target_file = self._get_obj_path(user_id, digest_to_use)
with open(target_file, 'r', encoding='utf-8') as file:
as_dict = json.load(file)
digest_to_use = as_dict[TAG_PARENT][0]
except FileNotFoundError:
break
return history
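A short usage sketch of the new history() call; db_engine stands for an already-configured DbEngine instance and the identifiers are placeholders.

digests = db_engine.history(user_id="user-42",
                            entry="WorkflowDesigner_demo",
                            max_items=50)
for digest in digests:
    # starting digest first, then its ancestors, following the TAG_PARENT links
    print(digest)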
def debug_root(self):
"""
Lists all folders in the root directory
@@ -312,7 +348,7 @@ class DbEngine:
return []
return [f for f in os.listdir(self.root) if os.path.isdir(os.path.join(self.root, f)) and f != 'refs']
def debug_get_digest(self, user_id, entry):
def get_digest(self, user_id, entry):
return self._get_entry_digest(user_id, entry)
def _serialize(self, obj):

View File

@@ -0,0 +1,76 @@
from fastcore.basics import NotStr
from core.utils import merge_classes
attr_map = {
"cls": "class",
"_id": "id",
}
def safe_attr(attr_name):
attr_name = attr_name.replace("hx_", "hx-")
attr_name = attr_name.replace("data_", "data-")
return attr_map.get(attr_name, attr_name)
def to_html(item):
if item is None:
return ""
elif isinstance(item, str):
return item
elif isinstance(item, (int, float, bool)):
return str(item)
elif isinstance(item, MyFt):
return item.to_html()
elif isinstance(item, NotStr):
return str(item)
else:
raise Exception(f"Unsupported type: {type(item)}, {item=}")
class MyFt:
def __init__(self, tag, *args, **kwargs):
self.tag = tag
self.children = args
self.attrs = {safe_attr(k): v for k, v in kwargs.items()}
def to_html(self):
body_items = [to_html(item) for item in self.children]
return f"<{self.tag} {' '.join(f'{k}="{v}"' for k, v in self.attrs.items())}>{' '.join(body_items)}</div>"
def __ft__(self):
return NotStr(self.to_html())
class MyDiv(MyFt):
def __init__(self, *args, **kwargs):
super().__init__("div", *args, **kwargs)
class MySpan(MyFt):
def __init__(self, *args, **kwargs):
super().__init__("span", *args, **kwargs)
def mk_my_ellipsis(txt: str, cls='', **kwargs):
merged_cls = merge_classes("truncate",
cls,
kwargs)
return MyDiv(txt, cls=merged_cls, data_tooltip=txt, **kwargs)
def mk_my_icon(icon, size=20, can_select=True, can_hover=False, cls='', tooltip=None, **kwargs):
merged_cls = merge_classes(f"icon-{size}",
'icon-btn' if can_select else '',
'mmt-btn' if can_hover else '',
cls,
kwargs)
return mk_my_tooltip(icon, tooltip, cls=merged_cls, **kwargs) if tooltip else MyDiv(icon, cls=merged_cls, **kwargs)
def mk_my_tooltip(element, tooltip: str, cls='', **kwargs):
merged_cls = merge_classes("mmt-tooltip",
cls,
kwargs)
return MyDiv(element, cls=merged_cls, data_tooltip=tooltip, **kwargs)
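With the closing-tag fix above, a quick illustration of what these helpers render; the import path is a guess since the new file's location is not shown in this hunk.

from components.my_ft import MyDiv, MySpan   # assumed module path

card = MyDiv(MySpan("42", cls="badge"), cls="stat", data_tooltip="Open items")
print(card.to_html())
# -> <div class="stat" data-tooltip="Open items"><span class="badge">42</span></div>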

View File

@@ -47,6 +47,10 @@ class InstanceManager:
return InstanceManager._instances[key]
@staticmethod
def new(session, instance_type, **kwargs):
return InstanceManager.get(session, instance_type.create_component_id(session), instance_type, **kwargs)
@staticmethod
def register(session: dict | None, instance, instance_id: str = None):
"""

296
src/core/jira.py Normal file
View File

@@ -0,0 +1,296 @@
import json
import logging
from enum import Enum
import requests
from requests.auth import HTTPBasicAuth
from core.Expando import Expando
JIRA_ROOT = "https://altares.atlassian.net/rest/api/3"
DEFAULT_HEADERS = {"Accept": "application/json"}
DEFAULT_SEARCH_FIELDS = "summary,status,assignee"
logger = logging.getLogger("Jira")
class NotFound(Exception):
pass
class JiraRequestTypes(Enum):
Search = "search"
Issue = "issue"
Comments = "comments"
Versions = "versions"
class Jira:
"""Manage default operation to JIRA"""
def __init__(self, user_name: str, api_token: str, fields=DEFAULT_SEARCH_FIELDS):
"""
Prepare a connection to JIRA
The initialisation does not connect to anything;
it only stores the user_name and the api_token.
Note that user_name plus api_token is the recommended way to authenticate,
and therefore the only one supported here.
:param user_name:
:param api_token:
"""
self.user_name = user_name
self.api_token = api_token
self.auth = HTTPBasicAuth(self.user_name, self.api_token)
self.fields = fields
def test(self):
logger.debug(f"test with no parameters")
url = f"{JIRA_ROOT}/myself"
logger.debug(f" url: {url}")
response = requests.request(
"GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth
)
logger.debug(f" response: {response}")
logger.debug(f" response.text: {response.text}")
return response
def issue(self, issue_id: str) -> list[Expando]:
"""
Retrieve an issue
:param issue_id:
:return:
"""
logger.debug(f"comments with {issue_id=}")
url = f"{JIRA_ROOT}/issue/{issue_id}"
logger.debug(f" url: {url}")
response = requests.request(
"GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth
)
logger.debug(f" response: {response}")
logger.debug(f" response.text: {response.text}")
return [Expando(json.loads(response.text))]
def fields(self) -> list[Expando]:
"""
Retrieve the list of all fields for an issue
:return:
"""
url = f"{JIRA_ROOT}/field"
response = requests.request(
"GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth
)
as_dict = json.loads(response.text)
return [Expando(field) for field in as_dict]
def search(self, jql: str, fields=None) -> list[Expando]:
"""
Executes a JQL and returns the list of issues
:param jql:
:param fields: list of fields to retrieve
:return:
"""
logger.debug(f"search with {jql=}, {fields=}")
if not jql:
raise ValueError("Jql cannot be empty.")
if not fields:
fields = self.fields
url = f"{JIRA_ROOT}/search"
logger.debug(f" url: {url}")
headers = DEFAULT_HEADERS.copy()
headers["Content-Type"] = "application/json"
payload = {
"fields": [f.strip() for f in fields.split(",")],
"fieldsByKeys": False,
"jql": jql,
"maxResults": 500, # Does not seem to be used. It's always 100 !
"startAt": 0
}
logger.debug(f" payload: {payload}")
result = []
while True:
logger.debug(f" Request startAt '{payload['startAt']}'")
response = requests.request("POST",
url,
data=json.dumps(payload),
headers=headers,
auth=self.auth)
logger.debug(f" response: {response}")
logger.debug(f" response.text: {response.text}")
if response.status_code != 200:
raise Exception(self._format_error(response))
as_dict = json.loads(response.text)
result += as_dict["issues"]
if as_dict["startAt"] + as_dict["maxResults"] >= as_dict["total"]:
logger.debug(f" response: {response}")
# We have now retrieved at least the total number of items
break
payload["startAt"] += as_dict["maxResults"]
return [Expando(issue) for issue in result]
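A hedged usage sketch of the client above; the credentials, JQL and field list are placeholders (in the application they come from the settings manager rather than being hard-coded).

from core.jira import Jira

jira = Jira("user@example.com", "api-token")
issues = jira.search('project = DEMO AND statusCategory != Done',
                     fields="summary,status,assignee")
for issue in issues:
    # Expando paths, as relied on by extract() below (dotted access is an assumption)
    print(issue.get("key"), issue.get("fields.summary"))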
def comments(self, issue_id: str) -> list[Expando]:
"""
Retrieve the list of comments for an issue
:param issue_id:
:return:
"""
logger.debug(f"comments with {issue_id=}")
url = f"{JIRA_ROOT}/issue/{issue_id}/comment"
logger.debug(f" url: {url}")
response = requests.request("GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth)
logger.debug(f" response: {response}")
logger.debug(f" response.text: {response.text}")
if response.status_code != 200:
raise Exception(self._format_error(response))
as_dict = json.loads(response.text)
result = as_dict["comments"]
return [Expando(issue) for issue in result]
def versions(self, project_key):
"""
Retrieve the list of versions (fixVersions) defined for a project
:param project_key:
:return:
"""
logger.debug(f"versions with {project_key=}")
url = f"{JIRA_ROOT}/project/{project_key}/versions"
logger.debug(f" url: {url}")
response = requests.request(
"GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth
)
logger.debug(f" response: {response}")
logger.debug(f" response.text: {response.text}")
if response.status_code != 200:
raise NotFound()
as_list = json.loads(response.text)
return [Expando(version) for version in as_list]
def extract(self, jql, mappings, updates=None) -> list[dict]:
"""
Executes a JQL query and yields one dict per issue.
The issue objects returned by the search contain every requested Jira field;
they are not all necessary, so this method keeps only the fields described
in the mappings and renames them accordingly.
:param jql:
:param mappings:
:param updates: List of updates (lambda on issue) to perform
:return:
"""
logger.debug(f"Processing extract using mapping {mappings}")
def _get_field(mapping):
"""Returns the meaningful jira field, for the mapping description path"""
fields = mapping.split(".")
return fields[1] if len(fields) > 1 and fields[0] == "fields" else fields[0]
# retrieve the list of requested fields from what was asked in the mapping
jira_fields = [_get_field(mapping) for mapping in mappings]
as_string = ", ".join(jira_fields)
issues = self.search(jql, as_string)
for issue in issues:
# apply updates if needed
if updates:
for update in updates:
update(issue)
row = {cvs_col: issue.get(jira_path) for jira_path, cvs_col in mappings.items() if cvs_col is not None}
yield row
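Reusing the jira client from the sketch after search(), here is a hypothetical extract() call; the JQL and column names are made up. Keys on the left are Jira field paths, values on the right are the output column names, and the 'fields.' prefix is stripped when building the list of fields to request.

mappings = {
    "key": "issue_key",
    "fields.summary": "summary",
    "fields.assignee.displayName": "assignee",
}
rows = list(jira.extract("project = DEMO", mappings))
# each row looks like {"issue_key": "DEMO-1", "summary": "...", "assignee": "..."}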
def get_version(self, project_key, version_name):
"""
Given a project name and a version name
returns fixVersion number in JIRA
:param project_key:
:param version_name:
:return:
"""
for version in self.versions(project_key):
if version.name == version_name:
return version
raise NotFound()
def get_all_fields(self):
"""
Helper function that returns the list of all fields that can be requested on an issue
:return:
"""
url = f"{JIRA_ROOT}/field"
response = requests.request(
"GET",
url,
headers=DEFAULT_HEADERS,
auth=self.auth
)
as_dict = json.loads(response.text)
return [Expando(issue) for issue in as_dict]
@staticmethod
def update_customer_refs(issue: Expando, bug_only=True, link_name=None):
issue["ticket_customer_refs"] = []
if bug_only and issue.fields.issuetype.name != "Bug":
return
for issue_link in issue.fields.issuelinks: # [i_link for i_link in issue.fields.issuelinks if i_link["type"]["name"] == "Relates"]:
if link_name and issue_link["type"]["name"] not in link_name:
continue
direction = "inwardIssue" if "inwardIssue" in issue_link else "outwardIssue"
related_issue_key = issue_link[direction]["key"]
if related_issue_key.startswith("ITSUP"):
issue.ticket_customer_refs.append(related_issue_key)
continue
@staticmethod
def _format_error(response):
if "errorMessages" in response.text:
error_messages = json.loads(response.text)["errorMessages"]
else:
error_messages = response.text
return f"Error {response.status_code} : {response.reason} : {error_messages}"

189
src/core/preprocessor.py Normal file
View File

@@ -0,0 +1,189 @@
from arpeggio import RegExMatch, ZeroOrMore, OneOrMore, ParserPython, EOF, NoMatch
class VariableParsingError(Exception):
"""Custom exception for variable parsing errors"""
def __init__(self, message, position):
self.message = message
self.position = position
super().__init__(f"Variable parsing error at position {position}: {message}")
class VariableProcessingError(Exception):
"""Custom exception for variable parsing errors"""
def __init__(self, message, position):
self.message = message
self.position = position
super().__init__(f"Variable processing error at position {position}: {message}")
def variable_name():
"""Variable name: alphanumeric characters and underscores"""
return RegExMatch(r'[a-zA-Z_][a-zA-Z0-9_]*')
def property_name():
"""Property name: same rules as variable name"""
return RegExMatch(r'[a-zA-Z_][a-zA-Z0-9_]*')
def variable_property():
"""A property access: .property_name"""
return ".", property_name
def variable():
"""A complete variable: $variable_name(.property)*"""
return "$", variable_name, ZeroOrMore(variable_property)
def text_char():
"""Any character that is not the start of a variable"""
return RegExMatch(r'[^$]')
def text_segment():
"""One or more non-variable characters"""
return OneOrMore(text_char)
def element():
"""Either a variable or a text segment"""
return [variable, text_segment]
def expression():
"""Complete expression: sequence of elements"""
return ZeroOrMore(element), EOF
class PlainTextPreprocessor:
def __init__(self):
self.parser = ParserPython(expression, debug=False, skipws=False)
@staticmethod
def _post_validation(elements):
if len(elements) < 2:
return
for element, next_element in zip(elements, elements[1:]):
if element['type'] == 'variable' and next_element['type'] == 'variable':
raise VariableParsingError("Invalid syntax.", next_element['start'])
@staticmethod
def _extract_elements_from_tree(parse_tree, original_text):
"""Extract elements with positions from the parse tree"""
elements = []
def process_node(node, current_pos=0):
nonlocal elements
if hasattr(node, 'rule_name'):
if node.rule_name == 'variable':
# Extract variable information
var_start = node.position
var_end = node.position_end
var_text = original_text[var_start:var_end]
parts = var_text[1:].split('.') # Remove $ and split by .
var_name = parts[0]
properties = parts[1:] if len(parts) > 1 else []
elements.append({
"type": "variable",
"name": var_name,
"properties": properties,
"start": var_start,
"end": var_end
})
elif node.rule_name == 'text_segment':
# Extract text segment
text_start = node.position
text_end = node.position_end
content = original_text[text_start:text_end]
stripped = content.strip()
if len(stripped) > 0 and stripped[0] == '.':
raise VariableParsingError("Invalid syntax in property name.", text_start)
elements.append({
"type": "text",
"content": content,
"start": text_start,
"end": text_end
})
elif node.rule_name in ('expression', 'element'):
for child in node:
process_node(child, current_pos)
# Process children
if hasattr(node, '_tx_children') and node._tx_children:
for child in node._tx_children:
process_node(child, current_pos)
process_node(parse_tree)
return elements
def parse(self, text):
"""
Parse text and return structure with text segments and variables with positions
Returns:
[
{"type": "text", "content": "...", "start": int, "end": int},
{"type": "variable", "name": "...", "properties": [...], "start": int, "end": int}
]
"""
if not text:
return []
try:
# Parse the text
parse_tree = self.parser.parse(text)
# Extract elements from parse tree
elements = self._extract_elements_from_tree(parse_tree, text)
# Extra validations
self._post_validation(elements)
# Sort elements by start position
elements.sort(key=lambda x: x['start'])
return elements
except NoMatch as e:
# Convert Arpeggio parsing errors to our custom error
raise VariableParsingError(f"Invalid syntax", e.position)
except Exception as e:
if isinstance(e, VariableParsingError):
raise
raise VariableParsingError(f"Parsing failed: {str(e)}", 0)
def preprocess(self, text, namespace):
result = ""
elements = self.parse(text)
for element in elements:
if element['type'] == 'text':
result += element['content']
elif element['type'] == 'variable':
value = namespace.get(element['name'])
if value is None:
raise VariableProcessingError(f"Variable '{element['name']}' is not defined.", element['start'])
try:
pos = element['start'] + len(element['name']) + 1 # +1 for the starting '$'
for property_name in element['properties']:
value = getattr(value, property_name)
pos += len(property_name) + 1 # +1 for the dot '.'
except AttributeError as e:
raise VariableProcessingError(f"Invalid property '{property_name}' for variable '{element['name']}'.",
pos) from e
result += str(value)
return result
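A short usage sketch of the preprocessor above; the namespace contents are illustrative.

from types import SimpleNamespace

pre = PlainTextPreprocessor()

pre.parse("project = $data.project")
# -> [{"type": "text", "content": "project = ", ...},
#     {"type": "variable", "name": "data", "properties": ["project"], ...}]

pre.preprocess("project = $data.project", {"data": SimpleNamespace(project="DEMO")})
# -> "project = DEMO"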

View File

@@ -98,10 +98,10 @@ class SettingsManager:
user_id, user_email = self._get_user(session)
return self._db_engine.save(user_id, user_email, entry, obj)
def load(self, session: dict, entry: str, default=NoDefault):
def load(self, session: dict, entry: str, digest=None, default=NoDefault):
user_id, _ = self._get_user(session)
try:
return self._db_engine.load(user_id, entry)
return self._db_engine.load(user_id, entry, digest)
except DbException:
return default
@@ -128,6 +128,14 @@ class SettingsManager:
return self._db_engine.exists(user_id, entry)
def get_digest(self, session: dict, entry: str):
user_id, _ = self._get_user(session)
return self._db_engine.get_digest(user_id, entry)
def history(self, session, entry, digest=None, max_items=1000):
user_id, _ = self._get_user(session)
return self._db_engine.history(user_id, entry, digest, max_items)
def get_db_engine(self):
return self._db_engine
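A hedged sketch of the digest/history API introduced in this hunk; the entry name and session are placeholders, and the newest-first ordering is taken from the DbEngine tests further down.

manager = SettingsManager()
digests = manager.history(session, "MyEntry")      # most recent digest first
latest = manager.load(session, "MyEntry", default={})
if len(digests) > 1:
    previous = manager.load(session, "MyEntry", digest=digests[1], default={})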
@@ -177,7 +185,7 @@ class GenericDbManager:
if key.startswith("_"):
super().__setattr__(key, value)
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
if not (hasattr(settings, key)):
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{key}'.")
@@ -188,7 +196,7 @@ class GenericDbManager:
if item.startswith("_"):
return super().__getattribute__(item)
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
if not (hasattr(settings, item)):
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{item}'.")
@@ -250,7 +258,7 @@ class NestedSettingsManager:
self._settings_manager.save(self._session, self._obj_entry, settings)
def _get_settings_and_object(self):
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
if not hasattr(settings, self._obj_attribute):
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{self._obj_attribute}'.")

View File

@@ -1,12 +1,16 @@
import ast
import base64
import cProfile
import functools
import hashlib
import importlib
import inspect
import pkgutil
import re
import time
import types
import uuid
from datetime import datetime
from enum import Enum
from io import BytesIO
from urllib.parse import urlparse
@@ -420,6 +424,66 @@ def split_host_port(url):
return host, port
def timed(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = time.perf_counter()
result = func(*args, **kwargs)
end = time.perf_counter()
# get class name
class_name = None
if args:
# check the first argument to see if it's a class
if inspect.isclass(args[0]):
class_name = args[0].__name__ # class method
elif hasattr(args[0], "__class__"):
class_name = args[0].__class__.__name__ # instance method
if class_name:
print(f"[PERF] {class_name}.{func.__name__} took {end - start:.4f} sec")
else:
print(f"[PERF] {func.__name__} took {end - start:.4f} sec")
return result
return wrapper
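A minimal illustration of the decorator above on a plain function (the function is hypothetical):

@timed
def load_rows():
    return list(range(1_000_000))

load_rows()
# prints something like: [PERF] load_rows took 0.0310 sec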
def profile_function(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
profiler = cProfile.Profile()
try:
profiler.enable()
result = func(*args, **kwargs)
finally:
profiler.disable()
# Determine class name if any
class_name = None
if args:
if inspect.isclass(args[0]):
class_name = args[0].__name__ # class method
elif hasattr(args[0], "__class__"):
class_name = args[0].__class__.__name__ # instance method
# Compose filename with timestamp
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
if class_name:
filename = f"{class_name}_{func.__name__}_{timestamp}.prof"
else:
filename = f"{func.__name__}_{timestamp}.prof"
# Dump stats to file
profiler.dump_stats(filename)
print(f"[PROFILE] Profiling data saved to {filename}")
return result
return wrapper
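The .prof files written above can be inspected with the standard-library pstats module; the filename here is just an example of the pattern the decorator produces.

import pstats

stats = pstats.Stats("MyClass_my_method_20250801_120000.prof")
stats.sort_stats("cumulative").print_stats(10)   # top 10 entries by cumulative time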
class UnreferencedNamesVisitor(ast.NodeVisitor):
"""
Try to find symbols that will be requested by the ast
@@ -463,5 +527,4 @@ class UnreferencedNamesVisitor(ast.NodeVisitor):
:rtype:
"""
self.names.add(node.arg)
self.visit_selected(node, ["value"])
self.visit_selected(node, ["value"])

View File

@@ -47,4 +47,9 @@ loggers:
AddStuffApp:
level: INFO
handlers: [ console ]
propagate: False
Jira:
level: DEBUG
handlers: [ console ]
propagate: False
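With this entry, any logger named Jira (or below it in the logging hierarchy) emits DEBUG records to the console; a small sketch, assuming module-level loggers follow this naming:

import logging

logger = logging.getLogger("Jira")
logger.debug("versions with project_key='DEMO'")   # shown at DEBUG by the config above
logging.getLogger("Jira.Search").debug("child loggers inherit the same handlers")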

View File

@@ -1,5 +1,4 @@
# global layout
import asyncio
import logging.config
import yaml
@@ -145,7 +144,10 @@ register_component("login", "components.login", "LoginApp")
register_component("register", "components.register", "RegisterApp")
register_component("theme_controller", "components.themecontroller", "ThemeControllerApp")
register_component("main_layout", "components.drawerlayout", "DrawerLayoutApp")
register_component("undo_redo", "components.undo_redo", "UndoRedoApp")
register_component("tabs", "components.tabs", "TabsApp") # before repositories
register_component("entryselector", "components.entryselector", "EntrySelectorApp")
register_component("jsonviewer", "components.jsonviewer", "JsonViewerApp")
register_component("applications", "components.applications", "ApplicationsApp")
register_component("repositories", "components.repositories", "RepositoriesApp")
register_component("workflows", "components.workflows", "WorkflowsApp")
@@ -210,6 +212,25 @@ app, rt = fast_app(
pico=False,
)
# -------------------------
# Profiling middleware
# -------------------------
# @app.middleware("http")
async def timing_middleware(request, call_next):
import time
start_total = time.perf_counter()
# Call the next middleware or route handler
response = await call_next(request)
end_total = time.perf_counter()
elapsed = end_total - start_total
print(f"[PERF] Total server time: {elapsed:.4f} sec - Path: {request.url.path}")
return response
settings_manager = SettingsManager()
import_settings = AdminImportSettings(settings_manager, None)
@@ -252,6 +273,17 @@ def get(session):
DrawerLayoutOld(pages),)
@rt('/toasting')
def get(session):
# Normally one toast is enough, this allows us to see
# different toast types in action.
add_toast(session, f"Toast is being cooked", "info")
add_toast(session, f"Toast is ready", "success")
add_toast(session, f"Toast is getting a bit crispy", "warning")
add_toast(session, f"Toast is burning!", "error")
return Titled("I like toast")
# Error Handling
@app.get("/{path:path}")
def not_found(path: str, session=None):
@@ -274,18 +306,7 @@ def not_found(path: str, session=None):
setup_toasts(app)
@rt('/toasting')
def get(session):
# Normally one toast is enough, this allows us to see
# different toast types in action.
add_toast(session, f"Toast is being cooked", "info")
add_toast(session, f"Toast is ready", "success")
add_toast(session, f"Toast is getting a bit crispy", "warning")
add_toast(session, f"Toast is burning!", "error")
return Titled("I like toast")
async def main():
def main():
logger.info(f" Starting FastHTML server on http://localhost:{APP_PORT}")
serve(port=APP_PORT)
@@ -293,9 +314,4 @@ async def main():
if __name__ == "__main__":
# Start your application
logger.info("Application starting...")
try:
asyncio.run(main())
except KeyboardInterrupt:
logger.info("\nStopping application...")
except Exception as e:
logger.error(f"Error: {e}")
main()

View File

@@ -1,4 +1,5 @@
from components.repositories.components.Repositories import Repositories
from components.undo_redo.components.UndoRedo import UndoRedo
from core.instance_manager import InstanceManager
@@ -6,4 +7,8 @@ class ComponentsInstancesHelper:
@staticmethod
def get_repositories(session):
return InstanceManager.get(session, Repositories.create_component_id(session))
@staticmethod
def get_undo_redo(session):
return InstanceManager.get(session, UndoRedo.create_component_id(session))

View File

@@ -0,0 +1,105 @@
from typing import Any
from core.Expando import Expando
from workflow.engine import DataPresenter
class DefaultDataPresenter(DataPresenter):
"""Default data presenter that returns the input data unchanged."""
def __init__(self, component_id: str, mappings_definition: str):
super().__init__(component_id)
self._mappings_definition = mappings_definition
self._split_definitions = [definition.strip() for definition in mappings_definition.split(",")]
if "*" not in mappings_definition:
self._static_mappings = self._get_static_mappings()
else:
self._static_mappings = None
def present(self, data: Any) -> Any:
self._validate_mappings_definition()
if self._static_mappings:
return Expando(data.to_dict(self._static_mappings))
dynamic_mappings = self._get_dynamic_mappings(data)
return Expando(data.to_dict(dynamic_mappings))
def _get_dynamic_mappings(self, data):
manage_conflicts = {}
mappings = {}
for mapping in self._split_definitions:
if "=" in mapping:
key, value = [s.strip() for s in mapping.split('=', 1)]
if key == "*":
# all fields
if value != "*":
raise ValueError("Only '*' is accepted when renaming wildcard.")
for key in data.as_dict().keys():
if key in manage_conflicts:
raise ValueError(f"Collision detected for field '{key}'. It is mapped from both '{manage_conflicts[key]}' and '{mapping}'.")
manage_conflicts[key] = mapping
mappings[key] = key
elif key.endswith(".*"):
# all fields in a sub-object
if value != "*" and value != "":
raise ValueError("Only '*' is accepted when renaming wildcard.")
obj_path = key[:-2]
sub_obj = data.get(obj_path)
if isinstance(sub_obj, dict):
for sub_field in sub_obj:
if sub_field in manage_conflicts:
raise ValueError(
f"Collision detected for field '{sub_field}'. It is mapped from both '{manage_conflicts[sub_field]}' and '{mapping}'.")
manage_conflicts[sub_field] = mapping
mappings[f"{obj_path}.{sub_field}"] = sub_field
else:
raise ValueError(f"Field '{obj_path}' is not an object.")
else:
mappings[key.strip()] = value.strip()
else:
if mapping == "*":
# all fields
for key in data.as_dict().keys():
mappings[key] = key
elif mapping.endswith(".*"):
# all fields in a sub-object
obj_path = mapping[:-2]
sub_obj = data.get(obj_path)
if isinstance(sub_obj, dict):
for sub_field in sub_obj:
mappings[f"{obj_path}.{sub_field}"] = f"{obj_path}.{sub_field}"
else:
raise ValueError(f"Field '{obj_path}' is not an object.")
else:
mappings[mapping] = mapping
return mappings
def _get_static_mappings(self):
mappings = {}
for mapping in self._split_definitions:
if "=" in mapping:
key, value = [s.strip() for s in mapping.split('=', 1)]
if not value:
value = key.split(".")[-1]
mappings[key] = value
else:
mappings[mapping] = mapping
return mappings
def _validate_mappings_definition(self):
last_char_was_comma = False
for i, char in enumerate(self._mappings_definition):
if char == ',':
if last_char_was_comma:
raise ValueError(f"Invalid mappings definition: Error found at index {i}")
last_char_was_comma = True
elif not char.isspace():
last_char_was_comma = False
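A few illustrative mappings definitions accepted by the presenter above, assuming items shaped like {"key": ..., "fields": {...}}:

DefaultDataPresenter("p1", "key, fields.summary = Summary")  # static: rename one nested field
DefaultDataPresenter("p2", "fields.*")                       # dynamic: every field under "fields"
DefaultDataPresenter("p3", "*")                              # dynamic: every top-level field
# Renaming a wildcard to anything but "*" raises ValueError, as does a field
# name produced by two different wildcard expansions (collision detection).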

View File

@@ -1,15 +1,37 @@
import ast
import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Any, Generator
from components.admin.admin_db_manager import AdminDbManager
from core.Expando import Expando
from core.jira import Jira, JiraRequestTypes
from core.preprocessor import PlainTextPreprocessor
from core.utils import UnreferencedNamesVisitor
from utils.Datahelper import DataHelper
@dataclass
class WorkflowPayload:
processor_name: str
component_id: str
item_linkage_id: int
item: Any
class DataProcessorError(Exception):
def __init__(self, component_id, error):
self.component_id = component_id
self.error = error
class DataProcessor(ABC):
"""Base class for all data processing components."""
def __init__(self, component_id: str = None):
self.component_id = component_id
@abstractmethod
def process(self, data: Any) -> Generator[Any, None, None]:
pass
@@ -24,7 +46,11 @@ class DataProducer(DataProcessor):
pass
def process(self, data: Any) -> Generator[Any, None, None]:
yield from self.emit(data)
try:
yield from self.emit(data)
except Exception as e:
raise DataProcessorError(self.component_id, e)
class DataFilter(DataProcessor):
@@ -36,8 +62,12 @@ class DataFilter(DataProcessor):
pass
def process(self, data: Any) -> Generator[Any, None, None]:
if self.filter(data):
yield data
try:
if self.filter(data):
yield data
except Exception as e:
raise DataProcessorError(self.component_id, e)
class DataPresenter(DataProcessor):
@@ -49,13 +79,18 @@ class DataPresenter(DataProcessor):
pass
def process(self, data: Any) -> Generator[Any, None, None]:
yield self.present(data)
try:
yield self.present(data)
except Exception as e:
raise DataProcessorError(self.component_id, e)
class TableDataProducer(DataProducer):
"""Base class for data producers that emit data from a repository."""
def __init__(self, session, settings_manager, repository_name, table_name):
def __init__(self, session, settings_manager, component_id, repository_name, table_name):
super().__init__(component_id)
self._session = session
self.settings_manager = settings_manager
self.repository_name = repository_name
@@ -65,35 +100,40 @@ class TableDataProducer(DataProducer):
yield from DataHelper.get(self._session, self.settings_manager, self.repository_name, self.table_name, Expando)
class DefaultDataPresenter(DataPresenter):
"""Default data presenter that returns the input data unchanged."""
class JiraDataProducer(DataProducer):
"""Base class for data producers that emit data from Jira."""
def __init__(self, columns_as_str: str):
super().__init__()
if not columns_as_str or columns_as_str == "*":
self.mappings = None
else:
self.mappings = {}
temp_mappings = [col.strip() for col in columns_as_str.split(",")]
for mapping in temp_mappings:
if "=" in mapping:
key, value = mapping.split("=")
self.mappings[key] = value
else:
self.mappings[mapping] = mapping
logger = logging.getLogger("DataProcessor.Producer.Jira")
def present(self, data: Any) -> Any:
if self.mappings is None:
return data
def __init__(self, session, settings_manager, component_id, request_type='search', request='', fields=None):
super().__init__(component_id)
self._session = session
self.settings_manager = settings_manager
self.request_type = request_type.value if isinstance(request_type, JiraRequestTypes) else request_type
self.request = request
self.fields = fields
self.db = AdminDbManager(session, settings_manager).jira
def emit(self, data: Any = None) -> Generator[Any, None, None]:
self.logger.debug(f"Emitting data from Jira: {self.request_type} {self.request} {self.fields}")
return Expando(data.to_dict(self.mappings))
preprocessor = PlainTextPreprocessor()
preprocessed_fields = preprocessor.preprocess(self.fields, {"data": data})
self.logger.debug(f" {preprocessed_fields=}")
jira = Jira(self.db.user_name, self.db.api_token, fields=preprocessed_fields)
if not hasattr(jira, self.request_type):
raise ValueError(f"Invalid request type: {self.request_type}")
preprocessed_request = preprocessor.preprocess(self.request, {"data": data})
self.logger.debug(f" {preprocessed_request=}")
yield from getattr(jira, self.request_type)(preprocessed_request)
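A hedged sketch of wiring this producer into a per-item lookup; the session, settings manager, placeholder fields, and the attributes referenced through $data are assumptions.

producer = JiraDataProducer(
    session,
    settings_manager,
    component_id="jira-1",
    request_type="search",   # must match a method of the Jira helper
    request="project = $data.project_key AND fixVersion = $data.version",
    fields="key, summary",
)
for issue in producer.process(item):   # item must expose project_key and version
    ...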
class DefaultDataFilter(DataFilter):
def __init__(self, filter_expression: str):
super().__init__()
def __init__(self, component_id: str, filter_expression: str):
super().__init__(component_id)
self.filter_expression = filter_expression
self._ast_tree = ast.parse(filter_expression, "<user input>", 'eval')
self._compiled = compile(self._ast_tree, "<string>", "eval")
@@ -112,44 +152,89 @@ class WorkflowEngine:
def __init__(self):
self.processors: list[DataProcessor] = []
self.has_error = False
self.global_error = None
self.errors = {}
self.debug = {}
self.nb_items = -1
def add_processor(self, processor: DataProcessor) -> 'WorkflowEngine':
"""Add a data processor to the pipeline."""
self.processors.append(processor)
return self
def _process_single_item(self, item: Any, processor_index: int = 0) -> Generator[Any, None, None]:
def _process_single_item(self, item_linkage_id, item: Any, processor_index: int = 0) -> Generator[Any, None, None]:
"""Process a single item through the remaining processors."""
if processor_index >= len(self.processors):
yield item
return
processor = self.processors[processor_index]
if processor.component_id not in self.debug:
self.debug[processor.component_id] = {"input": [], "output": []}
self.debug[processor.component_id]["input"].append(WorkflowPayload(
processor_name=processor.__class__.__name__,
component_id=processor.component_id,
item_linkage_id=item_linkage_id,
item=item))
# Process the item through the current processor
for processed_item in processor.process(item):
self.debug[processor.component_id]["output"].append(WorkflowPayload(
processor_name=processor.__class__.__name__,
component_id=processor.component_id,
item_linkage_id=item_linkage_id,
item=processed_item))
# Recursively process through remaining processors
yield from self._process_single_item(processed_item, processor_index + 1)
yield from self._process_single_item(item_linkage_id, processed_item, processor_index + 1)
def run(self) -> Generator[Any, None, None]:
"""
Run the workflow pipeline and yield results one by one.
The first processor must be a DataProducer.
"""
if not self.processors:
raise ValueError("No processors in the pipeline")
self.debug.clear()
if not self.processors:
self.has_error = False
self.global_error = "No processors in the pipeline"
self.nb_items = -1
raise ValueError(self.global_error)
self.nb_items = 0
first_processor = self.processors[0]
if not isinstance(first_processor, DataProducer):
raise ValueError("First processor must be a DataProducer")
self.has_error = False
self.global_error = "First processor must be a DataProducer"
raise ValueError(self.global_error)
for item in first_processor.emit():
yield from self._process_single_item(item, 1)
self.debug[first_processor.component_id] = {"input": [], "output": []}
for item_linkage_id, item in enumerate(first_processor.process(None)):
self.nb_items += 1
self.debug[first_processor.component_id]["output"].append(WorkflowPayload(
processor_name=first_processor.__class__.__name__,
component_id=first_processor.component_id,
item_linkage_id=item_linkage_id,
item=item))
yield from self._process_single_item(item_linkage_id, item, 1)
def run_to_list(self) -> list[Any]:
"""
Run the workflow and return all results as a list.
Use this method when you need all results at once.
"""
return list(self.run())
try:
return list(self.run())
except DataProcessorError as err:
self.has_error = True
self.errors[err.component_id] = err.error
return []
except Exception as err:
self.has_error = True
self.global_error = str(err)
return []
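A hedged sketch of assembling a pipeline with the engine above; the processor arguments, the repository and table names, the names visible to the filter expression, and the use of the standalone DefaultDataPresenter shown earlier are assumptions.

engine = WorkflowEngine()
engine.add_processor(
    TableDataProducer(session, settings_manager, "producer-1", "MyRepo", "MyTable")
).add_processor(
    DefaultDataFilter("filter-1", "data.status == 'Open'")   # expression namespace is an assumption
).add_processor(
    DefaultDataPresenter("presenter-1", "key, status")
)

rows = engine.run_to_list()
if engine.has_error:
    print(engine.global_error or engine.errors)   # errors are keyed by component_id
else:
    print(f"{engine.nb_items} item(s) produced; per-component payloads are in engine.debug")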

View File

@@ -43,6 +43,12 @@ class Contains:
"""
s: str
@dataclasses.dataclass
class DoesNotContain:
"""
To check if the attribute does not contain a specific value
"""
s: str
@dataclasses.dataclass
class JsonViewerNode:
@@ -449,6 +455,11 @@ def matches(actual, expected, path=""):
elif isinstance(expected.attrs[expected_attr], Contains):
assert expected.attrs[expected_attr].s in actual.attrs[expected_attr], \
f"{print_path(path)}Attribute '{expected_attr}' does not contain '{expected.attrs[expected_attr].s}': actual='{actual.attrs[expected_attr]}', expected ='{expected.attrs[expected_attr].s}'."
elif isinstance(expected.attrs[expected_attr], DoesNotContain):
assert expected.attrs[expected_attr].s not in actual.attrs[expected_attr], \
f"{print_path(path)}Attribute '{expected_attr}' does contain '{expected.attrs[expected_attr].s}' while it must not: actual='{actual.attrs[expected_attr]}'."
else:
assert actual.attrs[expected_attr] == expected.attrs[expected_attr], \
@@ -631,10 +642,10 @@ def extract_table_values_new(ft, header=True):
# first, get the header
if header:
header = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
header_element = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
header_map = {}
res = OrderedDict()
for row in header.children:
for row in header_element.children:
col_id = row.attrs["data-col"]
title = row.attrs["data-tooltip"]
header_map[col_id] = title
@@ -643,9 +654,10 @@ def extract_table_values_new(ft, header=True):
body = search_elements_by_name(ft, attrs={"class": "dt2-body"}, comparison_method='contains')[0]
for row in body.children:
for col in row.children:
col_id = col.attrs["data-col"]
cell_value = _get_cell_content_value(col)
res[header_map[col_id]].append(cell_value)
if hasattr(col, "attrs"):
col_id = col.attrs["data-col"]
cell_value = _get_cell_content_value(col)
res[header_map[col_id]].append(cell_value)
return res
@@ -750,13 +762,14 @@ def icon(name: str):
return NotStr(f'<svg name="{name}"')
def div_icon(name: str):
def div_icon(name: str, cls=None):
"""
Test if an element is an icon wrapped in a div
:param name:
:param cls:
:return:
"""
return Div(NotStr(f'<svg name="{name}"'))
return Div(NotStr(f'<svg name="{name}"'), cls=cls)
def span_icon(name: str):

34
tests/my_mocks.py Normal file
View File

@@ -0,0 +1,34 @@
from unittest.mock import MagicMock
import pytest
from fasthtml.components import *
from components.tabs.components.MyTabs import MyTabs
@pytest.fixture
def tabs_manager():
class MockTabsManager(MagicMock):
def __init__(self, *args, **kwargs):
super().__init__(*args, spec=MyTabs, **kwargs)
self.request_new_tab_id = MagicMock(side_effect=["new_tab_id", "new_tab_2", "new_tab_3", StopIteration])
self.tabs = {}
self.tabs_by_key = {}
def add_tab(self, title, content, key: str | tuple = None, tab_id: str = None, icon=None):
self.tabs[tab_id] = (title, content)
self.tabs_by_key[key] = (title, content)
def set_tab_content(self, tab_id, content, title=None, key: str | tuple = None, active=None):
self.tabs[tab_id] = (title, content)
self.tabs_by_key[key] = (title, content)
def refresh(self):
return Div(
Div(
[Div(title) for title in self.tabs.keys()]
),
list(self.tabs.values())[-1]
)
return MockTabsManager()

View File

@@ -509,3 +509,18 @@ def test_i_can_compute_footer_menu_position_when_not_enough_space(dg):
)
assert matches(menu, expected)
def test_the_content_of_the_cell_is_escaped(empty_dg):
df = pd.DataFrame({
'value': ['<div> My Content </div>'],
'value2': ['{My Content}'],
})
my_dg = empty_dg.init_from_dataframe(df)
actual = my_dg.__ft__()
table_content = extract_table_values_new(actual, header=True)
assert table_content == OrderedDict({
'value': ['&lt;div&gt; My Content &lt;/div&gt;'],
'value2': ['{My Content}']})

View File

@@ -235,3 +235,40 @@ def test_put_many_save_only_if_necessary(engine):
entry_content = engine.load(FAKE_USER_ID, "MyEntry")
assert entry_content[TAG_PARENT] == [None] # Still None, nothing was saved
def test_i_can_retrieve_history_using_put(engine):
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(1, "a", False))
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(2, "a", False))
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(3, "a", False))
history = engine.history(FAKE_USER_ID, "MyEntry")
assert len(history) == 3
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
assert v0["key1"] == DummyObj(3, "a", False)
assert v1["key1"] == DummyObj(2, "a", False)
assert v2["key1"] == DummyObj(1, "a", False)
assert v2[TAG_PARENT] == [None]
def test_i_can_retrieve_history_using_save(engine):
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(1, "a", False)})
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(2, "a", False)})
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(3, "a", False)})
history = engine.history(FAKE_USER_ID, "MyEntry")
assert len(history) == 3
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
assert v0["key1"] == DummyObj(3, "a", False)
assert v1["key1"] == DummyObj(2, "a", False)
assert v2["key1"] == DummyObj(1, "a", False)
assert v2[TAG_PARENT] == [None]

View File

@@ -11,7 +11,7 @@ def sample_structure():
"""
A pytest fixture to provide a sample tree structure for testing.
"""
return Html(
return Div(
Header(cls="first-class"),
Body(
"hello world",
@@ -26,13 +26,13 @@ def sample_structure():
@pytest.mark.parametrize("value, expected, expected_error", [
(Div(), "value",
"The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=div((),{})\nexpected=value."),
"The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<div></div>\nexpected=value."),
(Div(), A(),
"The elements are different: 'div' != 'a'."),
(Div(Div()), Div(A()),
"Path 'div':\n\tThe elements are different: 'div' != 'a'."),
(Div(A(Span())), Div(A("element")),
"Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=span((),{})\nexpected=element."),
"Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<span></span>\nexpected=element."),
(Div(attr="one"), Div(attr="two"),
"Path 'div':\n\tThe values are different for 'attr' : 'one' != 'two'."),
(Div(A(attr="alpha")), Div(A(attr="beta")),
@@ -57,7 +57,8 @@ def sample_structure():
"Path 'div[class=a long attr]':\n\tAttribute 'class' does not start with 'different start': actual='a long attr', expected ='different start'."),
(Div(cls="a long attr"), Div(cls=Contains("not included")),
"Path 'div[class=a long attr]':\n\tAttribute 'class' does not contain 'not included': actual='a long attr', expected ='not included'."),
(Div(cls="a long attr"), Div(cls=DoesNotContain("long attr")),
"Path 'div[class=a long attr]':\n\tAttribute 'class' does contain 'long attr' while it must not: actual='a long attr'."),
])
def test_matches_error_expected(value, expected, expected_error):
with pytest.raises(AssertionError) as error:
@@ -75,6 +76,7 @@ def test_matches_error_expected(value, expected, expected_error):
(Div(), Div(Empty)),
(Div(cls="a long attr"), Div(cls=StartsWith("a long"))),
(Div(cls="a long attr"), Div(cls=Contains("long"))),
(Div(cls="a long attr"), Div(cls=DoesNotContain("xxxx"))),
])
def test_matches_success_expected(value, expected):
assert matches(value, expected)

228
tests/test_hooks.py Normal file
View File

@@ -0,0 +1,228 @@
import pytest
from components.jsonviewer.hooks import (
HookContext, EventType, Hook, HookManager, HookBuilder,
WhenLongText, WhenEditable, WhenType, WhenKey, WhenPath, WhenValue,
CompositeCondition
)
# HookContext test helper
def create_mock_context(value=None, key=None, json_path=None, parent_node=None, node_type=None, children=None):
"""Helper to create a mock HookContext for testing."""
class Node:
def __init__(self, value, node_type=None, children=None):
self.value = value
self.__class__.__name__ = node_type or "MockNode"
self.children = children or []
mock_node = Node(value, node_type=node_type, children=children)
return HookContext(key=key, node=mock_node, helper=None, jsonviewer=None, json_path=json_path,
parent_node=parent_node)
# ================
# Test Conditions
# ================
@pytest.mark.parametrize("text, threshold, expected", [
("This is a very long text." * 10, 50, True), # Long text, above threshold
("Short text", 50, False), # Short text, below threshold
])
def test_i_can_detect_long_text(text, threshold, expected):
context = create_mock_context(value=text)
condition = WhenLongText(threshold=threshold)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("json_path, editable_paths, editable_types, node_value, is_leaf, expected", [
("root.editable.value", ["root.editable.value"], None, "Editable value", True, True), # Editable path matches
("root.not_editable.value", ["root.editable.value"], None, "Editable value", True, False),
# Editable path does not match
("root.editable.numeric", [], [int], 10, True, True), # Type is editable (int)
("root.editable.string", [], [int], "Non-editable value", True, False) # Type is not editable
])
def test_i_can_detect_editable(json_path, editable_paths, editable_types, node_value, is_leaf, expected):
context = create_mock_context(value=node_value, json_path=json_path)
context.is_leaf_node = lambda: is_leaf # Mock is_leaf_node behavior
condition = WhenEditable(editable_paths=editable_paths, editable_types=editable_types)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("node_value, target_type, expected", [
(123, int, True), # Matches target type
("String value", int, False) # Does not match target type
])
def test_i_can_detect_type_match(node_value, target_type, expected):
context = create_mock_context(value=node_value)
condition = WhenType(target_type=target_type)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("key, key_pattern, expected", [
("target_key", "target_key", True), # Exact match
("target_key", lambda k: k.startswith("target"), True), # Callable match
("wrong_key", "target_key", False) # Pattern does not match
])
def test_i_can_match_key(key, key_pattern, expected):
context = create_mock_context(key=key)
condition = WhenKey(key_pattern=key_pattern)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("json_path, path_pattern, expected", [
("root.items[0].name", r"root\.items\[\d+\]\.name", True), # Matches pattern
("root.invalid_path", r"root\.items\[\d+\]\.name", False) # Does not match
])
def test_i_can_match_path(json_path, path_pattern, expected):
context = create_mock_context(json_path=json_path)
condition = WhenPath(path_pattern=path_pattern)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("value, target_value, predicate, expected", [
(123, 123, None, True), # Direct match
(123, 456, None, False), # Direct mismatch
(150, None, lambda v: v > 100, True), # Satisfies predicate
(50, None, lambda v: v > 100, False), # Does not satisfy predicate
])
def test_i_can_detect_value(value, target_value, predicate, expected):
context = create_mock_context(value=value)
condition = WhenValue(target_value=target_value, predicate=predicate)
assert condition.evaluate(context) == expected
@pytest.mark.parametrize("value, conditions, operator, expected", [
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=int)], "AND", True),
# Both conditions pass (AND)
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=str)], "AND", False),
# One condition fails (AND)
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=str)], "OR", True),
# At least one passes (OR)
(50, [], "AND", True), # No conditions (default True for AND/OR)
])
def test_i_can_combine_conditions(value, conditions, operator, expected):
context = create_mock_context(value=value)
composite = CompositeCondition(conditions=conditions, operator=operator)
assert composite.evaluate(context) == expected
# ================
# Test Hooks
# ================
@pytest.mark.parametrize("event_type, actual_event, threshold, text, expected", [
(EventType.RENDER, EventType.RENDER, 10, "Long text" * 10, True), # Event matches, meets condition
(EventType.RENDER, EventType.CLICK, 10, "Long text" * 10, False), # Event mismatch
])
def test_i_can_match_hook(event_type, actual_event, threshold, text, expected):
context = create_mock_context(value=text)
condition = WhenLongText(threshold=threshold)
hook = Hook(event_type=event_type, conditions=[condition], executor=lambda ctx: "Executed")
assert hook.matches(event_type=actual_event, context=context) == expected
# ================
# Test HookManager
# ================
def test_i_can_execute_hooks_in_manager():
hook_manager = HookManager()
# Add hooks
hook1 = Hook(EventType.RENDER, conditions=[], executor=lambda ctx: "Render Executed")
hook2 = Hook(EventType.CLICK, conditions=[], executor=lambda ctx: "Click Executed")
hook_manager.add_hook(hook1)
hook_manager.add_hook(hook2)
context = create_mock_context()
render_results = hook_manager.execute_hooks(event_type=EventType.RENDER, context=context)
click_results = hook_manager.execute_hooks(event_type=EventType.CLICK, context=context)
assert len(render_results) == 1
assert render_results[0] == "Render Executed"
assert len(click_results) == 1
assert click_results[0] == "Click Executed"
def test_i_can_clear_hooks_in_manager():
hook_manager = HookManager()
hook_manager.add_hook(Hook(EventType.RENDER, conditions=[], executor=lambda ctx: "Render"))
assert len(hook_manager.hooks) == 1
hook_manager.clear_hooks()
assert len(hook_manager.hooks) == 0
# ================
# Test HookBuilder with Callable Conditions
# ================
def test_i_can_use_callable_with_when_custom():
"""Test that when_custom() accepts callable predicates"""
# Define a simple callable condition
def custom_condition(context):
return isinstance(context.get_value(), str) and context.get_value().startswith("CUSTOM_")
# Create hook using callable condition
hook = (HookBuilder()
.on_render()
.when_custom(custom_condition)
.execute(lambda ctx: "Custom hook executed"))
# Test with matching context
matching_context = create_mock_context(value="CUSTOM_test_value")
assert hook.matches(EventType.RENDER, matching_context) == True
assert hook.execute(matching_context) == "Custom hook executed"
# Test with non-matching context
non_matching_context = create_mock_context(value="regular_value")
assert hook.matches(EventType.RENDER, non_matching_context) == False
def test_i_can_use_lambda_with_when_custom():
"""Test that when_custom() accepts lambda expressions"""
# Create hook using lambda condition
hook = (HookBuilder()
.on_render()
.when_custom(lambda ctx: ctx.key == "special" and isinstance(ctx.get_value(), int) and ctx.get_value() > 100)
.execute(lambda ctx: f"Special value: {ctx.get_value()}"))
# Test with matching context
matching_context = create_mock_context(value=150, key="special")
assert hook.matches(EventType.RENDER, matching_context) == True
assert hook.execute(matching_context) == "Special value: 150"
# Test with non-matching contexts
wrong_key_context = create_mock_context(value=150, key="normal")
assert hook.matches(EventType.RENDER, wrong_key_context) == False
wrong_value_context = create_mock_context(value=50, key="special")
assert hook.matches(EventType.RENDER, wrong_value_context) == False
@pytest.mark.parametrize("value, key, json_path, expected", [
("CUSTOM_hook_test", "test_key", "root.test", True), # Matches callable condition
("regular_text", "test_key", "root.test", False), # Doesn't match callable condition
(123, "test_key", "root.test", False), # Wrong type
])
def test_callable_condition_evaluation(value, key, json_path, expected):
"""Test callable condition evaluation with different inputs"""
def custom_callable_condition(context):
return isinstance(context.get_value(), str) and context.get_value().startswith("CUSTOM_")
hook = (HookBuilder()
.on_render()
.when_custom(custom_callable_condition)
.execute(lambda ctx: "Executed"))
context = create_mock_context(value=value, key=key, json_path=json_path)
assert hook.matches(EventType.RENDER, context) == expected

View File

@@ -1,12 +1,12 @@
import pytest
from components.debugger.components.JsonViewer import *
from components.jsonviewer.components.JsonViewer import *
from components.jsonviewer.hooks import HookBuilder
from helpers import matches, span_icon, search_elements_by_name, extract_jsonviewer_node
JSON_VIEWER_INSTANCE_ID = "json_viewer"
ML_20 = "margin-left: 20px;"
CLS_PREFIX = "mmt-jsonviewer"
USER_ID = "user_id"
dn = DictNode
ln = ListNode
@@ -15,7 +15,7 @@ n = ValueNode
@pytest.fixture()
def json_viewer(session):
return JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, {})
return JsonViewer(session, JSON_VIEWER_INSTANCE_ID, {})
@pytest.fixture()
@@ -41,7 +41,7 @@ def jv_id(x):
ln([{"a": [1, 2]}], jv_id(0), 0, [dn({"a": [1, 2]}, jv_id(1), 1, {"a": ln([1, 2], jv_id(2), 2, [n(1), n(2)])})]))
])
def test_i_can_create_node(data, expected_node):
json_viewer_ = JsonViewer(None, JSON_VIEWER_INSTANCE_ID, None, USER_ID, data)
json_viewer_ = JsonViewer(None, JSON_VIEWER_INSTANCE_ID, data)
assert json_viewer_.node == expected_node
@@ -63,7 +63,7 @@ def test_i_can_render(json_viewer):
(None, Span("null", cls=f"{CLS_PREFIX}-null")),
])
def test_i_can_render_simple_value(session, value, expected_inner):
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
actual = jsonv.__ft__()
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
expected = Div(
@@ -81,7 +81,7 @@ def test_i_can_render_simple_value(session, value, expected_inner):
def test_i_can_render_expanded_list_node(session):
value = [1, "hello", True]
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Force expansion of the node
jsonv.set_folding_mode("expand")
@@ -107,7 +107,7 @@ def test_i_can_render_expanded_list_node(session):
def test_i_can_render_expanded_dict_node(session):
value = {"a": 1, "b": "hello", "c": True}
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Force expansion of the node
jsonv.set_folding_mode("expand")
@@ -133,7 +133,7 @@ def test_i_can_render_expanded_dict_node(session):
def test_i_can_render_expanded_list_of_dict_node(session):
value = [{"a": 1, "b": "hello"}]
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Force expansion of all nodes
jsonv.set_folding_mode("expand")
@@ -167,7 +167,7 @@ def test_i_can_render_expanded_list_of_dict_node(session):
def test_render_with_collapse_folding_mode(session):
# Create a nested structure to test collapse rendering
value = {"a": [1, 2, 3], "b": {"x": "y", "z": True}}
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Ensure folding mode is set to collapse (should be default)
jsonv.set_folding_mode("collapse")
@@ -195,7 +195,7 @@ def test_render_with_collapse_folding_mode(session):
def test_render_with_specific_node_expanded_in_collapse_mode(session):
# Create a nested structure to test mixed collapse/expand rendering
value = {"a": [1, 2, 3], "b": {"x": "y", "z": True}}
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Ensure folding mode is set to collapse
jsonv.set_folding_mode(FoldingMode.COLLAPSE)
@@ -230,7 +230,7 @@ def test_render_with_specific_node_expanded_in_collapse_mode(session):
def test_multiple_folding_levels_in_collapse_mode(session):
# Create a deeply nested structure
value = {"level1": {"level2": {"level3": [1, 2, 3]}}}
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Set folding mode to collapse
jsonv.set_folding_mode(FoldingMode.COLLAPSE)
@@ -262,7 +262,7 @@ def test_multiple_folding_levels_in_collapse_mode(session):
def test_toggle_between_folding_modes(session):
value = {"a": [1, 2, 3], "b": {"x": "y"}}
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
# Start with collapse mode
jsonv.set_folding_mode("collapse")
@@ -271,19 +271,19 @@ def test_toggle_between_folding_modes(session):
jsonv.set_node_folding(f"{JSON_VIEWER_INSTANCE_ID}-0", "expand")
# Verify node is in tracked nodes (exceptions to collapse mode)
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._nodes_to_track
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._folding_manager.get_nodes_to_track()
# Now switch to expand mode
jsonv.set_folding_mode("expand")
# Tracked nodes should be cleared
assert len(jsonv._nodes_to_track) == 0
assert len(jsonv._folding_manager.get_nodes_to_track()) == 0
# Collapse specific node
jsonv.set_node_folding(f"{JSON_VIEWER_INSTANCE_ID}-0", "collapse")
# Verify node is in tracked nodes (exceptions to expand mode)
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._nodes_to_track
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._folding_manager.get_nodes_to_track()
# Render and verify the output
actual = jsonv.__ft__()
@@ -297,34 +297,43 @@ def test_toggle_between_folding_modes(session):
def test_custom_hook_rendering(session, helper):
# Define a custom hook for testing
def custom_predicate(key, node, h):
return isinstance(node.value, str) and node.value == "custom_hook_test"
def custom_renderer(key, node, h):
return Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class")
hooks = [(custom_predicate, custom_renderer)]
# Create JsonViewer with the custom hook
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, "custom_hook_test", hooks=hooks)
actual = jsonv.__ft__()
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
expected = Div(
Div(
None,
None,
Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class"),
style=ML_20),
id=f"{jv_id('root')}")
assert matches(to_compare, expected)
# Define a custom condition to check if the value is "custom_hook_test"
def custom_condition(context):
return isinstance(context.node.value, str) and context.node.value == "custom_hook_test"
# Define a custom executor to render the desired output
def custom_renderer(context):
return Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class")
# Build the hook using HookBuilder
hook = (HookBuilder()
.on_render()
.when_custom(custom_condition)
.execute(custom_renderer))
# Create a JsonViewer with the new hook
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, "custom_hook_test", hooks=[hook])
# Actual rendered output
actual = jsonv.__ft__()
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
# Expected rendered output
expected = Div(
Div(
None,
None,
Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class"),
style=ML_20),
id=f"{jv_id('root')}"
)
# Assert that the actual output matches the expected output
assert matches(to_compare, expected)
def test_folding_mode_operations(session):
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, {"a": [1, 2, 3]})
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, {"a": [1, 2, 3]})
# Check default folding mode
assert jsonv.get_folding_mode() == "collapse"
@@ -338,11 +347,11 @@ def test_folding_mode_operations(session):
jsonv.set_node_folding(node_id, "collapse")
# Node should be in tracked nodes since it differs from the default mode
assert node_id in jsonv._nodes_to_track
assert node_id in jsonv._folding_manager.get_nodes_to_track()
# Restore to match default mode
jsonv.set_node_folding(node_id, "expand")
assert node_id not in jsonv._nodes_to_track
assert node_id not in jsonv._folding_manager.get_nodes_to_track()
@pytest.mark.parametrize("input_value, expected_output", [
@@ -353,7 +362,7 @@ def test_folding_mode_operations(session):
('', '""'), # Empty string
])
def test_add_quotes(input_value, expected_output):
result = JsonViewer.add_quotes(input_value)
result = JsonViewerHelper.add_quotes(input_value)
assert result == expected_output
@@ -367,4 +376,4 @@ def test_helper_is_sha256(helper):
assert not helper.is_sha256("a" * 63) # Too short
assert not helper.is_sha256("a" * 65) # Too long
assert not helper.is_sha256("g" * 64) # Invalid character
assert not helper.is_sha256("test") # Not a hash
assert not helper.is_sha256("test") # Not a hash

491
tests/test_preprocessor.py Normal file
View File

@@ -0,0 +1,491 @@
import pytest
from core.preprocessor import PlainTextPreprocessor, VariableParsingError, VariableProcessingError
def test_i_can_parse_empty_text():
"""Test that I can parse empty text input"""
processor = PlainTextPreprocessor()
result = processor.parse("")
assert result == []
def test_i_can_parse_text_without_variables():
"""Test that I can parse text without any variables"""
processor = PlainTextPreprocessor()
text = "This is just plain text with no variables"
result = processor.parse(text)
expected = [{
"type": "text",
"content": text,
"start": 0,
"end": len(text)
}]
assert result == expected
def test_i_can_parse_simple_variable():
"""Test that I can parse text with only a simple variable"""
processor = PlainTextPreprocessor()
text = "$variable"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "variable",
"properties": [],
"start": 0,
"end": 9
}]
assert result == expected
def test_i_can_parse_variable_with_underscores():
"""Test that I can parse variable with underscores in name"""
processor = PlainTextPreprocessor()
text = "$my_variable_name"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "my_variable_name",
"properties": [],
"start": 0,
"end": 17
}]
assert result == expected
def test_i_can_parse_variable_with_numbers():
"""Test that I can parse variable with numbers in name"""
processor = PlainTextPreprocessor()
text = "$var123"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "var123",
"properties": [],
"start": 0,
"end": 7
}]
assert result == expected
def test_i_can_parse_properties_with_underscores_and_numbers():
"""Test that I can parse property names with underscores and numbers"""
processor = PlainTextPreprocessor()
text = "$var._prop123.sub_prop_456"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "var",
"properties": ["_prop123", "sub_prop_456"],
"start": 0,
"end": 26
}]
assert result == expected
def test_i_can_parse_variable_starting_with_underscore():
"""Test that I can parse variable name starting with underscore"""
processor = PlainTextPreprocessor()
text = "$_private_var"
result = processor.parse(text)
expected = [
{
"type": "variable",
"name": "_private_var",
"properties": [],
"start": 0,
"end": 13
}
]
assert result == expected
def test_i_can_parse_variable_with_single_property():
"""Test that I can parse variable with one property"""
processor = PlainTextPreprocessor()
text = "$variable.prop"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "variable",
"properties": ["prop"],
"start": 0,
"end": 14
}]
assert result == expected
def test_i_can_parse_variable_with_multiple_properties():
"""Test that I can parse variable with multiple properties"""
processor = PlainTextPreprocessor()
text = "$variable.prop.subprop.deep"
result = processor.parse(text)
expected = [{
"type": "variable",
"name": "variable",
"properties": ["prop", "subprop", "deep"],
"start": 0,
"end": 27
}]
assert result == expected
def test_i_can_parse_text_with_variable_in_middle():
"""Test that I can parse text with variable in the middle"""
processor = PlainTextPreprocessor()
text = "project > $project_id and more"
result = processor.parse(text)
expected = [
{
"type": "text",
"content": "project > ",
"start": 0,
"end": 10
},
{
"type": "variable",
"name": "project_id",
"properties": [],
"start": 10,
"end": 21
},
{
"type": "text",
"content": " and more",
"start": 21,
"end": 30
}
]
assert result == expected
def test_i_can_parse_multiple_variables():
"""Test that I can parse text with multiple variables"""
processor = PlainTextPreprocessor()
text = "value == $variable.prop and $other_var"
result = processor.parse(text)
expected = [
{
"type": "text",
"content": "value == ",
"start": 0,
"end": 9
},
{
"type": "variable",
"name": "variable",
"properties": ["prop"],
"start": 9,
"end": 23
},
{
"type": "text",
"content": " and ",
"start": 23,
"end": 28
},
{
"type": "variable",
"name": "other_var",
"properties": [],
"start": 28,
"end": 38
}
]
assert result == expected
def test_i_can_preserve_all_whitespace():
"""Test that I can preserve all whitespace including tabs and newlines"""
processor = PlainTextPreprocessor()
text = " $var \t\n $other.prop "
result = processor.parse(text)
expected = [
{
"type": "text",
"content": " ",
"start": 0,
"end": 2
},
{
"type": "variable",
"name": "var",
"properties": [],
"start": 2,
"end": 6
},
{
"type": "text",
"content": " \t\n ",
"start": 6,
"end": 12
},
{
"type": "variable",
"name": "other",
"properties": ["prop"],
"start": 12,
"end": 23
},
{
"type": "text",
"content": " ",
"start": 23,
"end": 25
}
]
assert result == expected
def test_i_can_parse_text_with_special_characters():
"""Test that I can parse text with special characters"""
processor = PlainTextPreprocessor()
text = "Hello $user! @#%^&*()+={}[]|\\:;\"'<>?,./~`"
result = processor.parse(text)
expected = [
{
"type": "text",
"content": "Hello ",
"start": 0,
"end": 6
},
{
"type": "variable",
"name": "user",
"properties": [],
"start": 6,
"end": 11
},
{
"type": "text",
"content": "! @#%^&*()+={}[]|\\:;\"'<>?,./~`",
"start": 11,
"end": 41
}
]
assert result == expected
def test_i_can_parse_complex_expression():
"""Test that I can parse complex but valid expression"""
processor = PlainTextPreprocessor()
text = "if ($user.profile.age > 18 && $user.status == 'active') { $action.execute(); }"
result = processor.parse(text)
# Should parse successfully and find all variables
variables = [elem for elem in result if elem["type"] == "variable"]
assert len(variables) == 3
# Check variable details
assert variables[0]["name"] == "user"
assert variables[0]["properties"] == ["profile", "age"]
assert variables[1]["name"] == "user"
assert variables[1]["properties"] == ["status"]
assert variables[2]["name"] == "action"
assert variables[2]["properties"] == ["execute"]
def test_positions_are_accurate():
"""Test that element positions are accurate"""
processor = PlainTextPreprocessor()
text = "abc$var123*def"
result = processor.parse(text)
assert len(result) == 3
# Text before
assert result[0]["start"] == 0
assert result[0]["end"] == 3
assert result[0]["content"] == "abc"
# Variable
assert result[1]["start"] == 3
assert result[1]["end"] == 10
assert result[1]["name"] == "var123"
# Text after
assert result[2]["start"] == 10
assert result[2]["end"] == 14
assert result[2]["content"] == "*def"
# Error cases
def test_i_cannot_parse_dollar_alone_at_end():
"""Test that I cannot parse $ at the end of text"""
processor = PlainTextPreprocessor()
text = "Hello $"
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 7
assert "Invalid syntax" in str(exc_info.value)
# assert "Variable name missing after '$'" in str(exc_info.value)
def test_i_cannot_parse_dollar_alone_in_middle():
"""Test that I cannot parse $ alone in middle of text"""
processor = PlainTextPreprocessor()
text = "Hello $ world"
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 7
assert "Invalid syntax" in str(exc_info.value)
def test_i_cannot_parse_dot_immediately_after_dollar():
"""Test that I cannot parse $.property (dot immediately after $)"""
processor = PlainTextPreprocessor()
text = "$.property"
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 1
assert "Invalid syntax" in str(exc_info.value)
# assert "Variable name missing before '.'" in str(exc_info.value)
def test_i_cannot_parse_variable_ending_with_dot():
"""Test that I cannot parse $variable. (dot at the end)"""
processor = PlainTextPreprocessor()
text = "$variable."
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 9
assert "Invalid syntax in property name." in str(exc_info.value)
@pytest.mark.parametrize("text", ["$variable. prop", "$variable .prop", "$variable . prop"])
def test_i_cannot_parse_variable_when_space_in_variable_name(text):
"""Test that I cannot parse $variable. (dot at the end)"""
processor = PlainTextPreprocessor()
# text = "$variable. "
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 9
assert "Invalid syntax in property name." in str(exc_info.value)
def test_i_cannot_parse_variable_with_empty_property():
"""Test that I cannot parse $variable..property (empty property between dots)"""
processor = PlainTextPreprocessor()
text = "$variable..property"
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 9
assert "Invalid syntax in property name." in str(exc_info.value)
def test_i_cannot_parse_variable_ending_with_multiple_dots():
"""Test that I cannot parse $variable... (multiple dots at end)"""
processor = PlainTextPreprocessor()
text = "$variable..."
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 9
assert "Invalid syntax in property name." in str(exc_info.value)
def test_i_cannot_parse_when_consecutive_variables():
"""Test that I can parse consecutive variables without text between"""
processor = PlainTextPreprocessor()
text = "$var1$var2"
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
assert exc_info.value.position == 5
assert "Invalid syntax." in str(exc_info.value)
def test_first_error_is_reported_with_multiple_errors():
"""Test that first error is reported when multiple $ errors exist"""
processor = PlainTextPreprocessor()
text = "$ and $. and $var."
with pytest.raises(VariableParsingError) as exc_info:
processor.parse(text)
# Should report the first error ($ alone)
assert exc_info.value.position == 1
def test_i_can_preprocess_simple_variable():
"""Test preprocessing text with a simple variable"""
processor = PlainTextPreprocessor()
namespace = {"name": "John"}
result = processor.preprocess("Hello $name!", namespace)
assert result == "Hello John!"
def test_i_can_preprocess_with_properties():
"""Test preprocessing text with variable properties"""
class User:
def __init__(self):
self.profile = type('Profile', (), {'age': 25})()
processor = PlainTextPreprocessor()
namespace = {"user": User()}
result = processor.preprocess("Age: $user.profile.age", namespace)
assert result == "Age: 25"
def test_i_can_preprocess_multiple_variables():
"""Test preprocessing text with multiple variables"""
processor = PlainTextPreprocessor()
namespace = {"first": "Hello", "second": "World"}
result = processor.preprocess("$first $second!", namespace)
assert result == "Hello World!"
def test_i_can_preprocess_empty_text():
"""Test preprocessing empty text"""
processor = PlainTextPreprocessor()
namespace = {}
result = processor.preprocess("", namespace)
assert result == ""
def test_i_cannot_preprocess_undefined_variable():
"""Test preprocessing with undefined variable raises error"""
processor = PlainTextPreprocessor()
namespace = {}
with pytest.raises(VariableProcessingError) as exc_info:
processor.preprocess("$undefined_var", namespace)
assert "Variable 'undefined_var' is not defined" in str(exc_info.value)
def test_i_cannot_preprocess_invalid_property():
"""Test preprocessing with invalid property access"""
processor = PlainTextPreprocessor()
namespace = {"obj": object()}
with pytest.raises(VariableProcessingError) as exc_info:
processor.preprocess("some text $obj.invalid_prop", namespace)
assert "Invalid property 'invalid_prop' for variable 'obj'" in str(exc_info.value)
assert exc_info.value.position == 14
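The error and preprocessing tests above pin down the `$variable.property` grammar accepted by PlainTextPreprocessor. As a rough illustration only (the real implementation is not shown in this diff, and the names `VARIABLE_RE` and `substitute` below are invented), the substitution behaviour exercised by the preprocessing tests could be sketched with a single regular expression:
import re
# Hypothetical sketch: '$name' followed by an optional '.prop1.prop2...' chain.
# Error handling (dollar alone, empty properties, undefined variables) is omitted here.
VARIABLE_RE = re.compile(r"\$([A-Za-z_]\w*)((?:\.[A-Za-z_]\w*)*)")
def substitute(text, namespace):
    """Replace each $var.prop occurrence with the resolved attribute chain."""
    def resolve(match):
        value = namespace[match.group(1)]
        props = match.group(2)
        for prop in (props.lstrip(".").split(".") if props else []):
            value = getattr(value, prop)
        return str(value)
    return VARIABLE_RE.sub(resolve, text)
# substitute("Hello $name!", {"name": "John"}) -> "Hello John!"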

168
tests/test_undo_redo.py Normal file

@@ -0,0 +1,168 @@
import os
import shutil
import pytest
from fasthtml.components import Div
from components.undo_redo.components.UndoRedo import UndoRedo
from components.undo_redo.constants import UndoRedoAttrs
from core.dbengine import DbEngine
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, div_icon, Contains, DoesNotContain
from my_mocks import tabs_manager
DB_ENGINE_ROOT = "undo_redo_test_db"
TEST_DB_ENTRY = "TestDbEntry"
TEST_DB_KEY = "TestDbKey"
class TestCommand:
def __init__(self, value):
self.value = value
def __eq__(self, other):
if not isinstance(other, TestCommand):
return False
return self.value == other.value
def __hash__(self):
return hash(self.value)
@pytest.fixture()
def engine(session):
if os.path.exists(DB_ENGINE_ROOT):
shutil.rmtree(DB_ENGINE_ROOT)
engine = DbEngine(DB_ENGINE_ROOT)
engine.init(session["user_id"])
yield engine
shutil.rmtree(DB_ENGINE_ROOT)
@pytest.fixture()
def settings_manager(engine):
return SettingsManager(engine=engine)
@pytest.fixture
def undo_redo(session, tabs_manager, settings_manager):
return UndoRedo(session,
UndoRedo.create_component_id(session),
settings_manager=settings_manager,
tabs_manager=tabs_manager)
def init_command(session, settings_manager, undo_redo, value, on_undo=None):
settings_manager.save(session, TEST_DB_ENTRY, {TEST_DB_KEY: TestCommand(value)})
undo_redo.snapshot(UndoRedoAttrs(f"Set value to {value}", on_undo=on_undo), TEST_DB_ENTRY, TEST_DB_KEY)
def test_i_can_render(undo_redo):
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to undo."),
Div(div_icon("redo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to redo."),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
def test_i_can_render_when_undoing_and_redoing(session, settings_manager, undo_redo):
init_command(session, settings_manager, undo_redo, "1")
init_command(session, settings_manager, undo_redo, "2")
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 2'."),
Div(div_icon("redo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to redo."),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
undo_redo.undo() # The command is now undone. We can redo it and undo the first command.
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 1'."),
Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Redo 'Set value to 2'."),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
undo_redo.undo() # Undo again, I cannot undo anymore.
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=Contains("mmt-btn-disabled"))),
Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled"))),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
undo_redo.redo() # Redo once.
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled"))),
Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled"))),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
undo_redo.redo() # Redo a second time.
actual = undo_redo.__ft__()
expected = Div(
Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled"))),
Div(div_icon("redo", cls=Contains("mmt-btn-disabled"))),
id=undo_redo.get_id(),
)
assert matches(actual, expected)
def test_values_are_correctly_reset(session, settings_manager, undo_redo):
# Checks that the undone/redone values are correctly returned, and that
# hx_swap_oob="true" is automatically added when an id is present in the returned element.
def on_undo():
current = settings_manager.get(session, TEST_DB_ENTRY, TEST_DB_KEY)
return Div(current.value, id='an_id')
init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)
init_command(session, settings_manager, undo_redo, "2", on_undo=on_undo)
self, res = undo_redo.undo()
expected = Div("1", id='an_id', hx_swap_oob="true")
assert matches(res, expected)
self, res = undo_redo.redo()
expected = Div("2", id='an_id', hx_swap_oob="true")
assert matches(res, expected)
def test_i_can_manage_when_the_entry_was_not_present(session, settings_manager, undo_redo):
def on_undo():
snapshot = settings_manager.load(session, TEST_DB_ENTRY)
if TEST_DB_KEY in snapshot:
return Div(snapshot[TEST_DB_KEY].value, id='an_id')
else:
return Div("**Not Found**", id='an_id')
init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)
self, res = undo_redo.undo()
expected = Div("**Not Found**", id='an_id', hx_swap_oob="true")
assert matches(res, expected)
def test_history_is_rewritten_when_pushing_a_command_after_undo(session, settings_manager, undo_redo):
init_command(session, settings_manager, undo_redo, "1")
init_command(session, settings_manager, undo_redo, "2")
init_command(session, settings_manager, undo_redo, "3")
undo_redo.undo()
undo_redo.undo()
init_command(session, settings_manager, undo_redo, "5")
assert len(undo_redo.history) == 3 # do not forget that history always has a default command with digest = None
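The last test depends on the history being rewritten when a new snapshot is pushed after an undo. A minimal, hypothetical model of that behaviour (illustrative only; the real UndoRedo also keeps digests, on_undo callbacks and persistence through the settings manager) is:
class MiniHistory:
    """Sentinel entry plus a cursor into the snapshot list."""
    def __init__(self):
        self.history = [None]  # mirrors the default command with digest = None
        self.cursor = 0

    def snapshot(self, digest):
        del self.history[self.cursor + 1:]  # drop the redo tail: history is rewritten
        self.history.append(digest)
        self.cursor += 1

    def undo(self):
        if self.cursor > 0:
            self.cursor -= 1

    def redo(self):
        if self.cursor < len(self.history) - 1:
            self.cursor += 1
# Three snapshots, two undos, then one new snapshot leaves len(history) == 3,
# matching the assertion above.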


@@ -1,21 +1,48 @@
from unittest.mock import MagicMock
import pytest
from fastcore.basics import NotStr
from fasthtml.components import *
from fasthtml.xtend import Script
from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.WorkflowDesigner import WorkflowDesigner, COMPONENT_TYPES
from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, Connection
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, Contains
from my_mocks import tabs_manager
TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
# Create a mock UndoRedo instance
undo_redo = MagicMock(spec=UndoRedo)
# Store original get method
original_get = InstanceManager.get
def mock_get(sess, instance_id, *args, **kwargs):
if instance_id == UndoRedo.create_component_id(sess):
return undo_redo
return original_get(sess, instance_id, *args, **kwargs)
# Replace get method with our mock
InstanceManager.get = mock_get
yield undo_redo
# Restore original get method after test
InstanceManager.get = original_get
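# The same mock_undo_redo fixture is repeated in several test modules and restores
# InstanceManager.get by hand. A hypothetical equivalent (sketch only, not part of the
# suite) could rely on pytest's monkeypatch fixture to undo the patch automatically:
#
#     @pytest.fixture(autouse=True)
#     def mock_undo_redo(session, monkeypatch):
#         undo_redo = MagicMock(spec=UndoRedo)
#         original_get = InstanceManager.get
#         def mock_get(sess, instance_id, *args, **kwargs):
#             if instance_id == UndoRedo.create_component_id(sess):
#                 return undo_redo
#             return original_get(sess, instance_id, *args, **kwargs)
#         monkeypatch.setattr(InstanceManager, "get", mock_get)  # restored after each test
#         yield undo_redo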
@pytest.fixture
def designer(session):
def designer(session, tabs_manager):
return WorkflowDesigner(session=session, _id=TEST_WORKFLOW_DESIGNER_ID,
settings_manager=SettingsManager(engine=MemoryDbEngine()),
tabs_manager=tabs_manager,
key=TEST_WORKFLOW_DESIGNER_ID,
designer_settings=WorkflowsDesignerSettings("Workflow Name"),
boundaries={"height": 500, "width": 800}
@@ -70,7 +97,8 @@ def test_i_can_render_no_component(designer):
actual = designer.__ft__()
expected = Div(
H1("Workflow Name"),
P("Drag components from the toolbox to the canvas to create your workflow."),
# P("Drag components from the toolbox to the canvas to create your workflow."),
Div(id=f"t_{designer.get_id()}"), # media + error message
Div(id=f"d_{designer.get_id()}"), # designer container
Div(cls="wkf-splitter"),
Div(id=f"p_{designer.get_id()}"), # properties panel
@@ -83,7 +111,7 @@ def test_i_can_render_no_component(designer):
def test_i_can_render_a_producer(designer, producer_component):
component = producer_component
actual = designer._mk_workflow_component(component)
actual = designer._mk_component(component)
expected = Div(
# input connection point
Div(cls="wkf-connection-point wkf-input-point",


@@ -2,6 +2,8 @@ from unittest.mock import MagicMock
import pytest
from core.Expando import Expando
from workflow.DefaultDataPresenter import DefaultDataPresenter
from workflow.engine import WorkflowEngine, DataProcessor, DataProducer, DataFilter, DataPresenter
@@ -11,6 +13,24 @@ def engine():
return WorkflowEngine()
@pytest.fixture
def presenter_sample_data():
return Expando({
"id": 123,
"title": "My Awesome Task",
"creator": {
"id": 1,
"name": "John Doe",
"email": "john.doe@example.com"
},
"assignee": {
"id": 2,
"name": "Jane Smith",
"email": "jane.smith@example.com"
}
})
def test_empty_workflow_initialization(engine):
"""Test that a new WorkflowEngine has no processors."""
assert len(engine.processors) == 0
@@ -53,6 +73,7 @@ def test_run_simple_workflow(engine):
assert result == [1, 2, 3]
@pytest.mark.skip(reason="Not yet implemented")
def test_process_single_item(engine):
"""Test the internal _process_single_item method."""
mock_processor = MagicMock(spec=DataProcessor)
@@ -124,3 +145,21 @@ def test_branching_workflow(engine):
result = engine.run_to_list()
assert result == [1, 10, 2, 20]
def test_presenter_i_can_use_wildcards(presenter_sample_data):
presenter1 = DefaultDataPresenter("component_id", "id, creator.*")
res = presenter1.present(presenter_sample_data).as_dict()
assert res == {"id": 123, "creator.id": 1, "creator.name": "John Doe", "creator.email": "john.doe@example.com"}
def test_presenter_i_can_rename_wildcard_with_specific_override(presenter_sample_data):
presenter1 = DefaultDataPresenter("component_id", "creator.*=*, creator.name=author_name")
res = presenter1.present(presenter_sample_data).as_dict()
assert res == {"id": 1, "email": "john.doe@example.com", "author_name": "John Doe"}
def test_presenter_i_can_manage_collisions(presenter_sample_data):
presenter1 = DefaultDataPresenter("component_id", "creator.*=*, assignee.*=*")
with pytest.raises(ValueError, match="Collision detected for field"):
presenter1.present(presenter_sample_data).as_dict()
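The wildcard tests above hinge on the difference between 'creator.*', which keeps the dotted prefix in the output keys, and 'creator.*=*', which strips it (and is therefore what makes collisions possible). A hypothetical expansion helper, not the project's code, illustrates the distinction:
def expand_wildcard(data, prefix, strip_prefix=False):
    """Expand 'prefix.*' against a nested dict; strip_prefix mimics 'prefix.*=*'."""
    node = data
    for part in prefix.split("."):
        node = node[part]
    return {(key if strip_prefix else f"{prefix}.{key}"): value
            for key, value in node.items()}
# With a plain nested dict shaped like presenter_sample_data:
# expand_wildcard(sample, "creator")                    -> {"creator.id": 1, "creator.name": "John Doe", ...}
# expand_wildcard(sample, "creator", strip_prefix=True) -> {"id": 1, "name": "John Doe", "email": ...}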


@@ -0,0 +1,215 @@
import pytest
from core.Expando import Expando
from workflow.DefaultDataPresenter import DefaultDataPresenter
def test_i_can_present_static_mappings():
mappings_def = "field1 = renamed_1 , field2 "
presenter = DefaultDataPresenter("comp_id", mappings_def)
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
actual = presenter.present(data)
assert actual == Expando({"renamed_1": "value1", "field2": "value2"}) # field3 is removed
def test_i_can_present_implicit_renaming():
mappings_def = "root.field1="
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1"}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"field1": "value1"}
def test_the_latter_mappings_take_precedence():
mappings_def = "field1 = renamed_1 , field1 "
presenter = DefaultDataPresenter("comp_id", mappings_def)
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
actual = presenter.present(data)
assert actual == Expando({"field1": "value1"}) # field3 is removed
def test_i_can_present_static_mappings_with_sub_fields():
mappings_def = "root.field1 = renamed_1 , root.field2, root.sub_field.field3, root.sub_field.field4=renamed4 "
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"renamed_1": "value1",
"root.field2": "value2",
"root.sub_field.field3": "value3",
"renamed4": "value4"}
def test_i_can_present_dynamic_mappings():
mappings_def = "*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
actual = presenter.present(data)
assert actual == Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
def test_i_can_present_dynamic_mappings_for_complex_data():
mappings_def = "*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}
},
"field5": "value5"}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == as_dict
def test_i_can_present_dynamic_mappings_with_sub_fields():
mappings_def = "root.sub_field.*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"root.sub_field.field3": "value3",
"root.sub_field.field4": "value4"}
def test_i_can_present_dynamic_mappings_with_sub_fields_and_renames():
mappings_def = "root.sub_field.*=*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"field3": "value3",
"field4": "value4"}
def test_i_can_present_dynamic_mappings_with_sub_fields_and_implicit_renames():
mappings_def = "root.sub_field.*="
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"field3": "value3",
"field4": "value4"}
def test_i_can_present_dynamic_mappings_and_rename_them():
mappings_def = "*=*" # does not really have effects as '*' only goes down one level
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root1": {"field1": "value1",
"field2": "value2"},
"root2": {"field3": "value3",
"field4": "value4"}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == as_dict
def test_i_can_present_static_and_dynamic_mappings():
mappings_def = "root.field1 = renamed_1, root.sub_field.*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
actual = presenter.present(data)
assert isinstance(actual, Expando)
assert actual.as_dict() == {"renamed_1": "value1",
"root.sub_field.field3": "value3",
"root.sub_field.field4": "value4"}
def test_another_example_of_static_and_dynamic_mappings():
mappings_def = "* , field1 = renamed_1"
presenter = DefaultDataPresenter("comp_id", mappings_def)
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
actual = presenter.present(data)
assert actual == Expando({"renamed_1": "value1", "field2": "value2", "field3": "value3"}) # field3 is removed
def test_i_can_detect_conflict_when_dynamically_renaming_a_field():
mappings_def = "root_1.*=*, root_2.*=*"
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root_1": {"field1": "value1",
"field2": "value2"},
"root_2": {"field1": "value1",
"field2": "value2"}}
data = Expando(as_dict)
with pytest.raises(ValueError) as e:
presenter.present(data)
assert str(e.value) == "Collision detected for field 'field1'. It is mapped from both 'root_1.*=*' and 'root_2.*=*'."
def test_i_can_detect_declaration_error():
mappings_def = "field1 ,, field2"
presenter = DefaultDataPresenter("comp_id", mappings_def)
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
with pytest.raises(ValueError) as e:
presenter.present(data)
def test_i_can_detect_dynamic_error_declaration():
mappings_def = "root.field1.*" # field1 is not an object
presenter = DefaultDataPresenter("comp_id", mappings_def)
as_dict = {"root": {"field1": "value1",
"field2": "value2",
"sub_field": {"field3": "value3",
"field4": "value4"
}}}
data = Expando(as_dict)
with pytest.raises(ValueError) as e:
presenter.present(data)
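Taken together, these tests define a small mapping DSL: comma-separated entries, '=' for explicit renames, a trailing '=' for implicit renaming to the last path segment, and '*' for wildcards. A hypothetical parser for the static part of that grammar (names and details below are illustrative, not the actual DefaultDataPresenter) could look like:
def parse_mappings(mappings_def):
    """Split a mapping definition into (source, target) pairs."""
    pairs = []
    for raw in mappings_def.split(","):
        raw = raw.strip()
        if not raw:
            raise ValueError(f"Empty mapping in '{mappings_def}'")
        source, sep, target = (part.strip() for part in raw.partition("="))
        if not sep:
            target = source                 # "root.field2"  -> key stays "root.field2"
        elif not target:
            target = source.split(".")[-1]  # "root.field1=" -> implicit rename to "field1"
        pairs.append((source, target))
    return pairs
# parse_mappings("field1 = renamed_1 , field2") -> [("field1", "renamed_1"), ("field2", "field2")]
# parse_mappings("field1 ,, field2")            -> ValueError, as in test_i_can_detect_declaration_error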


@@ -0,0 +1,78 @@
import pytest
from unittest.mock import Mock, patch
from core.Expando import Expando
from core.jira import JiraRequestTypes
from core.settings_management import SettingsManager, MemoryDbEngine
from workflow.engine import JiraDataProducer, TableDataProducer
JIRA_IMPORT_PATH = "workflow.engine.Jira"
@pytest.fixture
def mock_jira_search_1():
with patch(JIRA_IMPORT_PATH) as mock_jira_class:
mock_jira_instance = Mock()
mock_jira_instance.search.return_value = [
Expando({
"key": "TEST-1",
"fields": {
"summary": "Test Issue",
"status": {"name": "Open"},
"assignee": {"displayName": "Test User"}
}
})
]
mock_jira_class.return_value = mock_jira_instance
yield mock_jira_instance # This allows us to access the mock instance in our tests
@pytest.fixture
def mock_jira_error():
with patch(JIRA_IMPORT_PATH) as mock_jira_class:
mock_jira_instance = Mock()
mock_jira_instance.search.side_effect = Exception("Jira API Error")
mock_jira_class.return_value = mock_jira_instance
yield mock_jira_instance
def get_jira_patch(jp: JiraDataProducer):
# Create and configure the mock instance
mock_jira_instance = Mock()
if jp.request_type == JiraRequestTypes.Search.value:
mock_jira_instance.search.return_value = [
Expando({
"key": "TEST-1",
"fields": {
"summary": "Test Issue",
"status": {"name": "Open"},
"assignee": {"displayName": "Test User"}
}
})
]
else:
raise ValueError("Hello Kodjo. Unsupported request type !")
return patch(JIRA_IMPORT_PATH, return_value=mock_jira_instance)
def jira_producer(session, request_type, request, fields=None):
return JiraDataProducer(session,
SettingsManager(MemoryDbEngine()),
"component_id",
request_type=request_type,
request=request,
fields=fields)
def test_i_can_produce_jira_search(session):
data = {}
jp = jira_producer(session, JiraRequestTypes.Search, "project=key1")
with get_jira_patch(jp):
res = list(jp.process(data))
assert len(res) == 1
assert res[0].key == "TEST-1"


@@ -0,0 +1,239 @@
from unittest.mock import MagicMock
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.WorkflowDesigner import COMPONENT_TYPES, WorkflowDesigner
from components.workflows.components.WorkflowPlayer import WorkflowPlayer, WorkflowsPlayerError
from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowComponent, Connection, ComponentState, WorkflowsDesignerSettings
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from my_mocks import tabs_manager
from workflow.engine import DataProcessorError
TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
TEST_WORKFLOW_PLAYER_ID = "workflow_player_id"
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
# Create a mock UndoRedo instance
undo_redo = MagicMock(spec=UndoRedo)
# Store original get method
original_get = InstanceManager.get
def mock_get(sess, instance_id, *args, **kwargs):
if instance_id == UndoRedo.create_component_id(sess):
return undo_redo
return original_get(sess, instance_id, *args, **kwargs)
# Replace get method with our mock
InstanceManager.get = mock_get
yield undo_redo
# Restore original get method after test
InstanceManager.get = original_get
@pytest.fixture
def settings_manager():
return SettingsManager(MemoryDbEngine())
@pytest.fixture
def designer(session, settings_manager, tabs_manager):
components = [
WorkflowComponent(
"comp_producer",
ProcessorTypes.Producer,
10, 100,
COMPONENT_TYPES[ProcessorTypes.Producer]["title"],
COMPONENT_TYPES[ProcessorTypes.Producer]["description"],
{"processor_name": "Repository"}
),
WorkflowComponent(
"comp_filter",
ProcessorTypes.Filter,
40, 100,
COMPONENT_TYPES[ProcessorTypes.Filter]["title"],
COMPONENT_TYPES[ProcessorTypes.Filter]["description"],
{"processor_name": "Default"}
),
WorkflowComponent(
"comp_presenter",
ProcessorTypes.Presenter,
70, 100,
COMPONENT_TYPES[ProcessorTypes.Presenter]["title"],
COMPONENT_TYPES[ProcessorTypes.Presenter]["description"],
{"processor_name": "Default"}
)
]
connections = [
Connection("conn_1", "comp_producer", "comp_filter"),
Connection("conn_2", "comp_filter", "comp_presenter"),
]
designer = WorkflowDesigner(
session,
TEST_WORKFLOW_DESIGNER_ID,
settings_manager,
tabs_manager,
"Workflow Designer",
WorkflowsDesignerSettings(workflow_name="Test Workflow"),
{"height": 500, "width": 800}
)
designer._state.components = {c.id: c for c in components}
designer._state.connections = connections
return designer
@pytest.fixture
def player(session, settings_manager, tabs_manager, designer):
"""
Sets up a standard WorkflowPlayer instance with a 3-component linear workflow.
A helper method 'get_dataframe' is attached for easier testing.
"""
return WorkflowPlayer(session=session,
_id=TEST_WORKFLOW_PLAYER_ID,
settings_manager=settings_manager,
tabs_manager=tabs_manager,
designer=designer,
boundaries={"height": 500, "width": 800}
)
def test_run_successful_workflow(player, mocker):
"""
Tests the "happy path" where the workflow runs successfully from start to finish.
"""
# 1. Arrange: Mock a successful engine run
mock_engine = MagicMock()
mock_engine.has_error = False
mock_result_data = [
MagicMock(as_dict=lambda: {'col_a': 1, 'col_b': 'x'}),
MagicMock(as_dict=lambda: {'col_a': 2, 'col_b': 'y'})
]
mock_engine.run_to_list.return_value = mock_result_data
mocker.patch.object(player, '_get_engine', return_value=mock_engine)
# 2. Act
player.run()
# 3. Assert: Check for success state and correct data
assert not player.has_error
assert player.global_error is None
for component_id, state in player.runtime_states.items():
assert state.state == ComponentState.SUCCESS
player._get_engine.assert_called_once()
mock_engine.run_to_list.assert_called_once()
expected_df = pd.DataFrame([row.as_dict() for row in mock_result_data])
assert_frame_equal(player.get_dataframe(), expected_df)
def test_run_with_cyclical_dependency(player, mocker):
"""
Tests that a workflow with a cycle is detected and handled before execution.
"""
# 1. Arrange: Introduce a cycle and spy on engine creation
player._designer._state.connections.append(Connection("conn_3", "comp_presenter", "comp_producer"))
spy_get_engine = mocker.spy(player, '_get_engine')
# 2. Act
player.run()
# 3. Assert: Check for the specific cycle error
assert player.has_error
assert "Workflow configuration error: A cycle was detected" in player.global_error
spy_get_engine.assert_not_called()
def test_run_with_component_initialization_failure(player, mocker):
"""
Tests that an error during a component's initialization is handled correctly.
"""
# 1. Arrange: Make the engine creation fail for a specific component
failing_component_id = "comp_filter"
error = ValueError("Missing a required property")
mocker.patch.object(player, '_get_engine', side_effect=WorkflowsPlayerError(failing_component_id, error))
# 2. Act
player.run()
# 3. Assert: Check that the specific component is marked as failed
assert player.has_error
assert f"Failed to init component '{failing_component_id}'" in player.global_error
assert player.runtime_states[failing_component_id].state == ComponentState.FAILURE
assert str(error) in player.runtime_states[failing_component_id].error_message
assert player.runtime_states["comp_producer"].state == ComponentState.NOT_RUN
def test_run_with_failure_in_middle_component(player, mocker):
"""
Tests failure in a middle component updates all component states correctly.
"""
# 1. Arrange: Mock an engine that fails at the filter component
mock_engine = MagicMock()
mock_engine.has_error = True
failing_component_id = "comp_filter"
error = RuntimeError("Data processing failed unexpectedly")
mock_engine.errors = {failing_component_id: DataProcessorError(failing_component_id, error)}
mock_engine.run_to_list.return_value = []
mocker.patch.object(player, '_get_engine', return_value=mock_engine)
# 2. Act
player.run()
# 3. Assert: Check the state of each component in the chain
assert player.has_error
assert f"Error in component 'comp_filter':" in player.global_error
assert player.runtime_states["comp_producer"].state == ComponentState.SUCCESS
assert player.runtime_states[failing_component_id].state == ComponentState.FAILURE
assert str(error) in player.runtime_states[failing_component_id].error_message
assert player.runtime_states["comp_presenter"].state == ComponentState.NOT_RUN
def test_run_with_empty_workflow(player, mocker):
"""
Tests that running a workflow with no components exits early without flagging an error.
"""
# 1. Arrange: Clear components and connections
player._designer._state.components = {}
player._designer._state.connections = []
spy_get_engine = mocker.spy(player, '_get_engine')
# 2. Act
player.run()
# 3. Assert: Ensure it finishes cleanly with no data
assert not player.has_error
assert player.global_error == 'No connections defined.'
spy_get_engine.assert_not_called()
def test_run_with_global_engine_error(player, mocker):
"""
Tests a scenario where the engine reports a global error not tied to a specific component.
"""
# 1. Arrange: Mock a global engine failure
mock_engine = MagicMock()
mock_engine.has_error = True
mock_engine.errors = {} # No specific component error
mock_engine.global_error = "A simulated global engine failure"
mock_engine.run_to_list.return_value = []
mocker.patch.object(player, '_get_engine', return_value=mock_engine)
# 2. Act
player.run()
# 3. Assert: The player should report the global error from the engine
assert player.has_error
assert player.global_error == mock_engine.global_error
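test_run_with_cyclical_dependency assumes the player can detect a loop in the connection graph before building the engine. A standard depth-first check over (source, target) pairs is enough for that; the helper below is hypothetical and not the project's implementation:
def has_cycle(edges):
    """Return True if the directed graph given as (source, target) pairs contains a cycle."""
    graph = {}
    for source, target in edges:
        graph.setdefault(source, []).append(target)
    visiting, done = set(), set()

    def visit(node):
        if node in done:
            return False
        if node in visiting:
            return True
        visiting.add(node)
        if any(visit(nxt) for nxt in graph.get(node, [])):
            return True
        visiting.discard(node)
        done.add(node)
        return False

    return any(visit(node) for node in list(graph))
# has_cycle([("comp_producer", "comp_filter"), ("comp_filter", "comp_presenter")]) -> False
# adding ("comp_presenter", "comp_producer") closes the loop and returns True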


@@ -4,41 +4,16 @@ import pytest
from fasthtml.components import *
from components.form.components.MyForm import FormField, MyForm
from components.tabs.components.MyTabs import MyTabs
from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.Workflows import Workflows
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, div_icon, search_elements_by_name, Contains
from my_mocks import tabs_manager
TEST_WORKFLOWS_ID = "testing_repositories_id"
@pytest.fixture
def tabs_manager():
class MockTabsManager(MagicMock):
def __init__(self, *args, **kwargs):
super().__init__(*args, spec=MyTabs, **kwargs)
self.request_new_tab_id = MagicMock(side_effect =["new_tab_id", "new_tab_2", "new_tab_3", StopIteration])
self.tabs = {}
self.tabs_by_key = {}
def add_tab(self, title, content, key: str | tuple = None, tab_id: str = None, icon=None):
self.tabs[tab_id] = (title, content)
self.tabs_by_key[key] = (title, content)
def set_tab_content(self, tab_id, content, title=None, key: str | tuple = None, active=None):
self.tabs[tab_id] = (title, content)
self.tabs_by_key[key] = (title, content)
def refresh(self):
return Div(
Div(
[Div(title) for title in self.tabs.keys()]
),
list(self.tabs.values())[-1]
)
return MockTabsManager()
boundaries = {"height": 500, "width": 800}
@pytest.fixture
def workflows(session, tabs_manager):
@@ -47,6 +22,28 @@ def workflows(session, tabs_manager):
tabs_manager=tabs_manager)
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
# Create a mock UndoRedo instance
undo_redo = MagicMock(spec=UndoRedo)
# Store original get method
original_get = InstanceManager.get
def mock_get(sess, instance_id, *args, **kwargs):
if instance_id == UndoRedo.create_component_id(sess):
return undo_redo
return original_get(sess, instance_id, *args, **kwargs)
# Replace get method with our mock
InstanceManager.get = mock_get
yield undo_redo
# Restore original get method after test
InstanceManager.get = original_get
def test_render_no_workflow(workflows):
actual = workflows.__ft__()
expected = Div(
@@ -117,7 +114,7 @@ def test_i_can_add_a_new_workflow(workflows, tabs_manager):
res = workflows.request_new_workflow()
tab_id = list(res.tabs.keys())[0]
actual = workflows.add_new_workflow(tab_id, "Not relevant here", "New Workflow", {})
actual = workflows.add_new_workflow(tab_id, "Not relevant here", "New Workflow", boundaries)
expected = (
Div(
@@ -134,11 +131,11 @@ def test_i_can_add_a_new_workflow(workflows, tabs_manager):
def test_i_can_select_a_workflow(workflows):
workflows.add_new_workflow("tab_id_1", "Not relevant", "workflow 1", {})
workflows.add_new_workflow("tab_id_2", "Not relevant", "workflow 2", {})
workflows.add_new_workflow("tab_id_3", "Not relevant", "workflow 3", {})
workflows.add_new_workflow("tab_id_1", "Not relevant", "workflow 1", boundaries)
workflows.add_new_workflow("tab_id_2", "Not relevant", "workflow 2", boundaries)
workflows.add_new_workflow("tab_id_3", "Not relevant", "workflow 3", boundaries)
actual = workflows.show_workflow("workflow 2", {})
actual = workflows.show_workflow("workflow 2", boundaries)
expected = (
Div(
@@ -150,4 +147,4 @@ def test_i_can_select_a_workflow(workflows):
Div(), # Workflow Designer embedded in the tab
)
assert matches(actual, expected)
assert matches(actual, expected)