Compare commits
10 commits (03ed1af7e6 … a0cf5aff0c)

| Author | SHA1 | Date |
|---|---|---|
| | a0cf5aff0c | |
| | d064a553dd | |
| | 6f17f6ee1f | |
| | ed793995fb | |
| | f3deeaefd1 | |
| | fdf05edec3 | |
| | bdd954b243 | |
| | 2754312141 | |
| | d0f7536fa0 | |
| | 2b288348e2 | |
@@ -31,6 +31,7 @@ pydantic-settings==2.9.1
pydantic_core==2.33.2
Pygments==2.19.1
pytest==8.3.3
pytest-mock==3.14.1
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
python-fasthtml==0.12.21
@@ -1,6 +1,11 @@
const tooltipElementId = "mmt-app"

function bindTooltipsWithDelegation() {
    // To display the tooltip, the attribute 'data-tooltip' is mandatory => it contains the tooltip text
    // Then
    //   the class 'truncate' shows the tooltip only when the text is truncated
    //   the class 'mmt-tooltip' forces the display

    const elementId = tooltipElementId
    console.debug("bindTooltips on element " + elementId);
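As a reading aid: a component opts into this contract from the Python side by emitting the attributes the delegation handler looks for. A minimal sketch, assuming the JS binds purely on these class names and the `data-tooltip` attribute (fasthtml turns the `data_tooltip` keyword into `data-tooltip`):

```python
# Minimal sketch of a FastHTML element opting into the tooltip contract
# described in the comments above (assumption: the JS binds on exactly
# these class names and the data-tooltip attribute).
from fasthtml.components import Div

cell = Div(
    "A very long label that may be cut off",
    cls="truncate mmt-tooltip",  # 'truncate' -> only when clipped; 'mmt-tooltip' -> force display
    data_tooltip="Full text shown in the tooltip",  # mandatory attribute holding the tooltip text
)
```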
@@ -48,4 +48,22 @@ def post(session, _id: str, content: str):
def post(session, _id: str):
    logger.debug(f"Entering {Routes.ImportHolidays} with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.import_holidays()


@rt(Routes.ConfigureJira)
def get(session, _id: str, boundaries: str):
    logger.debug(f"Entering {Routes.ConfigureJira} - GET with args {debug_session(session)}, {_id=}, {boundaries=}")
    instance = InstanceManager.get(session, _id)
    return instance.show_configure_jira(json.loads(boundaries) if boundaries else None)


@rt(Routes.ConfigureJira)
def post(session, _id: str, args: dict):
    logger.debug(f"Entering {Routes.ConfigureJira} - POST with args {debug_session(session)}, {_id=}, {args=}")
    instance = InstanceManager.get(session, _id)
    return instance.update_jira_settings(args)


@rt(Routes.ConfigureJiraCancel)
def post(session, _id: str):
    logger.debug(f"Entering {Routes.ConfigureJiraCancel} with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.cancel_jira_settings()
@@ -23,9 +23,16 @@ class AiBuddySettingsEntry:
        self.ollama_port = port


@dataclass()
class JiraSettingsEntry:
    user_name: str = ""
    api_token: str = ""


@dataclass
class AdminSettings:
    ai_buddy: AiBuddySettingsEntry = field(default_factory=AiBuddySettingsEntry)
    jira: JiraSettingsEntry = field(default_factory=JiraSettingsEntry)


class AdminDbManager:
@@ -37,3 +44,8 @@ class AdminDbManager:
                                              ADMIN_SETTINGS_ENTRY,
                                              AdminSettings,
                                              "ai_buddy")
        self.jira = NestedSettingsManager(session,
                                          settings_manager,
                                          ADMIN_SETTINGS_ENTRY,
                                          AdminSettings,
                                          "jira")
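For context: once wired this way, the Jira credentials read like plain attributes. A minimal sketch, assuming a `session` and `settings_manager` are in scope (the same objects passed around elsewhere in this diff); `JiraDataProducer` further down reads the settings the same way:

```python
# Usage sketch, assuming `session` and `settings_manager` already exist.
# `.jira` is the NestedSettingsManager created above.
from components.admin.admin_db_manager import AdminDbManager

jira_settings = AdminDbManager(session, settings_manager).jira
print(jira_settings.user_name, bool(jira_settings.api_token))
```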
src/components/admin/assets/icons.py (new file, 10 lines)
@@ -0,0 +1,10 @@
from fastcore.basics import NotStr

icon_jira = NotStr("""<svg name="jira" viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg">
    <defs>
        <style>.a{fill:none;stroke:#000000;stroke-linecap:round;stroke-linejoin:round;}</style>
    </defs>
    <path class="a" d="M5.5,22.9722h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,25.0278,42.5h0V22.9722Z"/>
    <path class="a" d="M14.2361,14.2361h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556a8.7361,8.7361,0,0,0,8.7361,8.7361h0V14.2361Z"/>
    <path class="a" d="M22.9722,5.5h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,42.5,25.0278h0V5.5Z"/>
</svg>""")
@@ -38,7 +38,31 @@ class AdminCommandManager(BaseCommandManager):
            "hx-swap": "outerHTML",
            "hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
        }

    def show_configure_jira(self):
        return {
            "hx-get": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
            "hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
        }

    def save_configure_jira(self):
        return {
            "hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
            "hx-vals": f'js:{{"_id": "{self._id}"}}',
            # The form adds the rest
        }

    def cancel_configure_jira(self):
        return {
            "hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJiraCancel}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
            "hx-vals": f'js:{{"_id": "{self._id}"}}',
        }


class ImportHolidaysCommandManager(BaseCommandManager):
    def __init__(self, owner):
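For context, these dicts are consumed by splatting them into an element builder — the pattern used in the `Admin.__ft__` hunk below. A minimal sketch; `admin` stands in for an `Admin` instance:

```python
# How the command dicts are consumed (pattern from the Admin.__ft__ hunk
# below): the hx-* attributes are splatted into the element builder, so
# the rendered tag carries the htmx wiring.
attrs = admin.commands.show_configure_jira()  # {"hx-get": ..., "hx-target": ..., ...}
link = mk_ellipsis("jira", cls="text-sm", **attrs)
```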
@@ -4,10 +4,11 @@ from ai.mcp_client import MPC_CLIENTS_IDS
from ai.mcp_tools import MCPServerTools
from components.BaseComponent import BaseComponent
from components.admin.admin_db_manager import AdminDbManager
from components.admin.assets.icons import icon_jira
from components.admin.commands import AdminCommandManager
from components.admin.components.AdminForm import AdminFormItem, AdminFormType, AdminForm
from components.admin.components.ImportHolidays import ImportHolidays
-from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID
+from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_JIRA_INSTANCE_ID
from components.aibuddy.assets.icons import icon_brain_ok
from components.hoildays.assets.icons import icon_holidays
from components.tabs.components.MyTabs import MyTabs
@@ -59,6 +60,30 @@ class Admin(BaseComponent):

        return self._add_tab(ADMIN_AI_BUDDY_INSTANCE_ID, "Admin - Import Holidays", form)

    def show_configure_jira(self, boundaries):
        fields = [
            AdminFormItem('user_name', "Email", "Email used to connect to JIRA.", AdminFormType.TEXT),
            AdminFormItem("api_token", "API Key", "API Key to connect to JIRA.", AdminFormType.TEXT),
        ]
        hooks = {
            "on_ok": self.commands.save_configure_jira(),
            "on_cancel": self.commands.cancel_configure_jira(),
            "ok_title": "Apply"
        }

        form = InstanceManager.get(self._session,
                                   AdminForm.create_component_id(self._session, prefix=self._id),
                                   AdminForm,
                                   owner=self,
                                   title="Jira Configuration Page",
                                   obj=self.db.jira,
                                   form_fields=fields,
                                   hooks=hooks,
                                   key=ADMIN_JIRA_INSTANCE_ID,
                                   boundaries=boundaries
                                   )
        return self._add_tab(ADMIN_JIRA_INSTANCE_ID, "Admin - Jira Configuration", form)

    def update_ai_buddy_settings(self, values: dict):
        values = self.manage_lists(values)
        self.db.ai_buddy.update(values, ignore_missing=True)
@@ -69,6 +94,17 @@ class Admin(BaseComponent):
        self.tabs_manager.remove_tab(tab_id)
        return self.tabs_manager.render()

    def update_jira_settings(self, values: dict):
        values = self.manage_lists(values)
        self.db.jira.update(values, ignore_missing=True)
        return self.tabs_manager.render()

    def cancel_jira_settings(self):
        tab_id = self.tabs_manager.get_tab_id(ADMIN_JIRA_INSTANCE_ID)
        self.tabs_manager.remove_tab(tab_id)
        return self.tabs_manager.render()

    def __ft__(self):
        return Div(
            Div(cls="divider"),
@@ -84,6 +120,11 @@ class Admin(BaseComponent):
                mk_ellipsis("holidays", cls="text-sm", **self.commands.show_import_holidays()),
                cls="flex p-0 min-h-0 truncate",
            ),
            Div(
                mk_icon(icon_jira, can_select=False),
                mk_ellipsis("jira", cls="text-sm", **self.commands.show_configure_jira()),
                cls="flex p-0 min-h-0 truncate",
            ),
            #
            # cls=""),
            # Script(f"bindAdmin('{self._id}')"),
@@ -1,6 +1,7 @@
ADMIN_INSTANCE_ID = "__Admin__"
ADMIN_AI_BUDDY_INSTANCE_ID = "__AdminAIBuddy__"
ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID = "__AdminImportHolidays__"
ADMIN_JIRA_INSTANCE_ID = "__AdminJira__"
ROUTE_ROOT = "/admin"
ADMIN_SETTINGS_ENTRY = "Admin"
@@ -8,4 +9,6 @@ class Routes:
    AiBuddy = "/ai-buddy"
    AiBuddyCancel = "/ai-buddy-cancel"
    ImportHolidays = "/import-holidays"
    PasteHolidays = "/paste-holidays"
    ConfigureJira = "/configure-jira"
    ConfigureJiraCancel = "/configure-jira-cancel"
@@ -1,3 +1,4 @@
from fasthtml.components import Html
from fasthtml.components import *
from fasthtml.xtend import Script
@@ -4,10 +4,12 @@

using `_id={WORKFLOW_DESIGNER_INSTANCE_ID}{session['user_id']}{get_unique_id()}`

-| Name          | value            |
-|---------------|------------------|
-| Canvas        | `c_{self._id}`   |
-| Designer      | `d_{self._id}`   |
-| Error Message | `err_{self._id}` |
-| Properties    | `p_{self._id}`   |
-| Splitter      | `s_{self._id}`   |
+| Name            | value              |
+|-----------------|--------------------|
+| Canvas          | `c_{self._id}`     |
+| Designer        | `d_{self._id}`     |
+| Error Message   | `err_{self._id}`   |
+| Properties      | `p_{self._id}`     |
+| Splitter        | `s_{self._id}`     |
+| Top element     | `t_{self._id}`     |
@@ -43,7 +43,7 @@ def post(session, _id: str, component_type: str, x: int, y: int):


@rt(Routes.MoveComponent)
-def post(session, _id: str, component_id: str, x: int, y: int):
+def post(session, _id: str, component_id: str, x: float, y: float):
    logger.debug(
        f"Entering {Routes.MoveComponent} with args {debug_session(session)}, {_id=}, {component_id=}, {x=}, {y=}")
    instance = InstanceManager.get(session, _id)
@@ -133,3 +133,10 @@ def post(session, _id: str, tab_boundaries: str):
        f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.play_workflow(json.loads(tab_boundaries))


@rt(Routes.StopWorkflow)
def post(session, _id: str):
    logger.debug(
        f"Entering {Routes.StopWorkflow} with args {debug_session(session)}, {_id=}")
    instance = InstanceManager.get(session, _id)
    return instance.stop_workflow()
@@ -51,12 +51,17 @@

.wkf-canvas {
    position: relative;
    box-sizing: border-box;
    background-image:
        linear-gradient(rgba(0,0,0,.1) 1px, transparent 1px),
        linear-gradient(90deg, rgba(0,0,0,.1) 1px, transparent 1px);
    background-size: 20px 20px;
}

.wkf-canvas-error {
    border: 3px solid var(--color-error);
}

.wkf-toolbox {
    min-height: 230px;
    width: 8rem; /* w-32 (32 * 0.25rem = 8rem) */
@@ -89,6 +94,11 @@
    transition: none;
}

.wkf-workflow-component.error {
    background: var(--color-error);
}

.wkf-component-content {
    padding: 0.75rem; /* p-3 in Tailwind */
    border-radius: 0.5rem; /* rounded-lg in Tailwind */
@@ -99,6 +109,12 @@
    align-items: center; /* items-center in Tailwind */
}

.wkf-component-content.error {
    background: var(--color-error);
}

.wkf-component-content.not-run {
}

.wkf-connection-line {
    position: absolute;
@@ -81,7 +81,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
            "hx_post": f"{ROUTE_ROOT}{Routes.PauseWorkflow}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
-            "hx-vals": f'js:{{"_id": "{self._id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
+            "hx-vals": f'js:{{"_id": "{self._id}"}}',
        }

    def stop_workflow(self):
@@ -89,7 +89,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
            "hx_post": f"{ROUTE_ROOT}{Routes.StopWorkflow}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
-            "hx-vals": f'js:{{"_id": "{self._id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
+            "hx-vals": f'js:{{"_id": "{self._id}"}}',
        }
@@ -11,7 +11,7 @@ from components.workflows.commands import WorkflowDesignerCommandManager
from components.workflows.components.WorkflowPlayer import WorkflowPlayer
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, \
-    Connection, WorkflowsDesignerDbManager, WorkflowsPlayerSettings
+    Connection, WorkflowsDesignerDbManager, ComponentState
from components_helpers import apply_boundaries, mk_tooltip, mk_dialog_buttons, mk_icon
from core.instance_manager import InstanceManager
from core.utils import get_unique_id, make_safe_id
@@ -64,6 +64,16 @@ class WorkflowDesigner(BaseComponent):
        self._state = self._db.load_state(key)
        self._boundaries = boundaries
        self.commands = WorkflowDesignerCommandManager(self)

        workflow_name = self._designer_settings.workflow_name
        self._player = InstanceManager.get(self._session,
                                           WorkflowPlayer.create_component_id(self._session, workflow_name),
                                           WorkflowPlayer,
                                           settings_manager=self._settings_manager,
                                           tabs_manager=self.tabs_manager,
                                           designer=self,
                                           boundaries=boundaries)

        self._error_message = None

    def set_boundaries(self, boundaries: dict):
@@ -72,8 +82,8 @@ class WorkflowDesigner(BaseComponent):
    def refresh_designer(self):
        return self._mk_elements()

-    def refresh_properties(self):
-        return self._mk_properties()
+    def refresh_properties(self, oob=False):
+        return self._mk_properties(oob)

    def add_component(self, component_type, x, y):
        self._state.component_counter += 1
@@ -97,11 +107,12 @@ class WorkflowDesigner(BaseComponent):

    def move_component(self, component_id, x, y):
        if component_id in self._state.components:
            self._state.selected_component_id = component_id
            self._state.components[component_id].x = int(x)
            self._state.components[component_id].y = int(y)
            self._db.save_state(self._key, self._state)  # update db

-        return self.refresh_designer()
+        return self.refresh_designer(), self.refresh_properties(True)

    def delete_component(self, component_id):
        # Remove component
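The tuple return is the htmx out-of-band pattern: the first fragment replaces the request target, while the second, rendered by `refresh_properties(True)`, carries `hx_swap_oob='true'` (see `_mk_properties` below) and is matched by id. A minimal sketch with hypothetical ids:

```python
# Minimal sketch of the out-of-band pattern used above (ids are hypothetical).
# FastHTML serialises the tuple into one response; htmx swaps the first
# fragment into the request target and the hx-swap-oob fragment by id.
from fasthtml.components import Div

def handler():
    main = Div("designer markup", id="c_demo")                        # normal swap
    side = Div("properties markup", id="p_demo", hx_swap_oob="true")  # out-of-band swap
    return main, side
```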
@@ -177,25 +188,25 @@ class WorkflowDesigner(BaseComponent):
        return self.refresh_properties()

    def play_workflow(self, boundaries: dict):
        if self._state.selected_component_id is None:
            return self.error_message("No component selected")
        self._error_message = None

        workflow_name = self._designer_settings.workflow_name
        player = InstanceManager.get(self._session,
                                     WorkflowPlayer.create_component_id(self._session, workflow_name),
                                     WorkflowPlayer,
                                     settings_manager=self._settings_manager,
                                     tabs_manager=self.tabs_manager,
                                     player_settings=WorkflowsPlayerSettings(workflow_name,
                                                                             list(self._state.components.values())),
                                     boundaries=boundaries)
        try:
            player.run()
            self.tabs_manager.add_tab(f"Workflow {workflow_name}", player, player.key)
            return self.tabs_manager.refresh()
            self._player.run()
            if self._player.global_error:
                # Show the error message in the same tab
                self._error_message = self._player.global_error

        except Exception as e:
            return self.error_message(str(e))
        else:
            # change the tab and display the results
            self._player.set_boundaries(boundaries)
            self.tabs_manager.add_tab(f"Workflow {self._designer_settings.workflow_name}", self._player, self._player.key)

        return self.tabs_manager.refresh()

    def stop_workflow(self):
        self._error_message = None
        self._player.stop()
        return self.tabs_manager.refresh()

    def on_processor_details_event(self, component_id: str, event_name: str, details: dict):
        if component_id in self._state.components:
@@ -207,9 +218,14 @@ class WorkflowDesigner(BaseComponent):

        return self.refresh_properties()

    def error_message(self, message: str):
        self._error_message = message
        return self.tabs_manager.refresh()

    def get_workflow_name(self):
        return self._designer_settings.workflow_name

    def get_workflow_components(self):
        return self._state.components.values()

    def get_workflow_connections(self):
        return self._state.connections

    def __ft__(self):
        return Div(
@@ -218,7 +234,8 @@ class WorkflowDesigner(BaseComponent):
            Div(
                self._mk_media(),
                self._mk_error_message(),
-                cls="flex mb-2"
+                cls="flex mb-2",
+                id=f"t_{self._id}"
            ),
            self._mk_designer(),
            Div(cls="wkf-splitter", id=f"s_{self._id}"),
@@ -259,19 +276,62 @@ class WorkflowDesigner(BaseComponent):
            </svg>
            """

    def _mk_component(self, component: WorkflowComponent):

        runtime_state = self._player.get_component_runtime_state(component.id)

        info = COMPONENT_TYPES[component.type]
        is_selected = self._state.selected_component_id == component.id
        tooltip_content = None
        tooltip_class = ""

        if runtime_state.state == ComponentState.FAILURE:
            state_class = 'error'  # To be styled with a red highlight
            tooltip_content = runtime_state.error_message
            tooltip_class = "mmt-tooltip"
        elif runtime_state.state == ComponentState.NOT_RUN:
            state_class = 'not-run'  # To be styled as greyed-out
        else:
            state_class = ''

        return Div(
            # Input connection point
            Div(cls="wkf-connection-point wkf-input-point",
                data_component_id=component.id,
                data_point_type="input"),

            # Component content
            Div(
                Span(info["icon"], cls="text-xl mb-1"),
                H4(component.title, cls="font-semibold text-xs"),
                cls=f"wkf-component-content {info['color']} {state_class}"
            ),

            # Output connection point
            Div(cls="wkf-connection-point wkf-output-point",
                data_component_id=component.id,
                data_point_type="output"),

            cls=f"wkf-workflow-component w-32 {'selected' if is_selected else ''} {tooltip_class}",
            style=f"left: {component.x}px; top: {component.y}px;",
            data_component_id=component.id,
            data_tooltip=tooltip_content,
            draggable="true"
        )

    def _mk_elements(self):
        return Div(
            # Render connections
            *[NotStr(self._mk_connection_svg(conn)) for conn in self._state.connections],

            # Render components
-            *[self._mk_workflow_component(comp) for comp in self._state.components.values()],
+            *[self._mk_component(comp) for comp in self._state.components.values()],
        )

    def _mk_canvas(self, oob=False):
        return Div(
            self._mk_elements(),
-            cls="wkf-canvas flex-1 rounded-lg border flex-1",
+            cls=f"wkf-canvas flex-1 rounded-lg border flex-1 {'wkf-canvas-error' if self._error_message else ''}",
            id=f"c_{self._id}",
            hx_swap_oob='true' if oob else None,
        ),
@@ -291,7 +351,7 @@ class WorkflowDesigner(BaseComponent):
            self._mk_toolbox(),  # (Left side)
            self._mk_canvas(),  # (Right side)

-            cls="wkf-designer flex gap-4",
+            cls="wkf-designer flex gap-1",
            id=f"d_{self._id}",
            style=f"height:{self._state.designer_height}px;"
        )
@@ -299,8 +359,8 @@ class WorkflowDesigner(BaseComponent):
    def _mk_media(self):
        return Div(
            mk_icon(icon_play, cls="mr-1", **self.commands.play_workflow()),
-            mk_icon(icon_pause, cls="mr-1", **self.commands.play_workflow()),
-            mk_icon(icon_stop, cls="mr-1", **self.commands.play_workflow()),
+            mk_icon(icon_pause, cls="mr-1", **self.commands.pause_workflow()),
+            mk_icon(icon_stop, cls="mr-1", **self.commands.stop_workflow()),
            cls=f"media-controls flex m-2"
        )
@@ -372,11 +432,12 @@ class WorkflowDesigner(BaseComponent):
            Script(f"bindFormData('f_{self._id}_{component_id}');")
        )

-    def _mk_properties(self):
+    def _mk_properties(self, oob=False):
        return Div(
            self._mk_properties_details(self._state.selected_component_id),
            cls="p-2 bg-base-100 rounded-lg border",
            style=f"height:{self._get_properties_height()}px;",
+            hx_swap_oob='true' if oob else None,
            id=f"p_{self._id}",
        )
@@ -470,7 +531,8 @@ class WorkflowDesigner(BaseComponent):
                      value=component.properties.get("columns", ""),
                      placeholder="Columns to display, separated by comma",
                      cls="input w-full"),
-            P("Comma separated list of columns to display. Use * to display all columns, source=dest to rename columns."),
+            P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
+            P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
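The grammar described by these help strings is implemented by `DefaultDataPresenter` (new file further down in this diff). A small sketch of the two main forms, with made-up field names; the expected outputs follow the behaviour pinned down in `tests/test_default_data_presenter.py`:

```python
# Sketch of the columns grammar handled by DefaultDataPresenter.
# Field names here are made up for illustration.
from core.Expando import Expando
from workflow.DefaultDataPresenter import DefaultDataPresenter

row = Expando({"key": "ABC-1", "fields": {"summary": "Fix login", "status": "Done"}})

DefaultDataPresenter("c1", "key = issue, fields.summary").present(row)
# -> {"issue": "ABC-1", "fields.summary": "Fix login"}

DefaultDataPresenter("c2", "fields.*=*").present(row)
# -> {"summary": "Fix login", "status": "Done"}  (the 'fields' prefix is dropped)
```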
@@ -501,30 +563,3 @@ class WorkflowDesigner(BaseComponent):
            draggable="true",
            data_type=component_type
        )

    @staticmethod
    def _mk_workflow_component(component: WorkflowComponent):
        info = COMPONENT_TYPES[component.type]
        return Div(
            # Input connection point
            Div(cls="wkf-connection-point wkf-input-point",
                data_component_id=component.id,
                data_point_type="input"),

            # Component content
            Div(
                Span(info["icon"], cls="text-xl mb-1"),
                H4(component.title, cls="font-semibold text-xs"),
                cls=f"wkf-component-content {info['color']}"
            ),

            # Output connection point
            Div(cls="wkf-connection-point wkf-output-point",
                data_component_id=component.id,
                data_point_type="output"),

            cls="wkf-workflow-component w-32",
            style=f"left: {component.x}px; top: {component.y}px;",
            data_component_id=component.id,
            draggable="true"
        )
@@ -1,3 +1,6 @@
from collections import deque
from dataclasses import dataclass

import pandas as pd
from fasthtml.components import *
@@ -6,10 +9,12 @@ from components.datagrid_new.components.DataGrid import DataGrid
from components.datagrid_new.settings import DataGridSettings
from components.workflows.commands import WorkflowPlayerCommandManager
from components.workflows.constants import WORKFLOW_PLAYER_INSTANCE_ID, ProcessorTypes
-from components.workflows.db_management import WorkflowsPlayerSettings
+from components.workflows.db_management import WorkflowComponentRuntimeState, \
+    WorkflowComponent, ComponentState
from core.instance_manager import InstanceManager
from core.utils import get_unique_id, make_safe_id
-from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataPresenter, DefaultDataFilter
+from workflow.DefaultDataPresenter import DefaultDataPresenter
+from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataFilter, JiraDataProducer

grid_settings = DataGridSettings(
    header_visible=True,
@@ -19,18 +24,24 @@ grid_settings = DataGridSettings(
    open_settings_visible=False)


@dataclass
class WorkflowsPlayerError(Exception):
    component_id: str
    error: Exception


class WorkflowPlayer(BaseComponent):
    def __init__(self, session,
                 _id=None,
                 settings_manager=None,
                 tabs_manager=None,
-                 player_settings: WorkflowsPlayerSettings = None,
+                 designer=None,
                 boundaries: dict = None):
        super().__init__(session, _id)
        self._settings_manager = settings_manager
        self.tabs_manager = tabs_manager
-        self.key = f"__WorkflowPlayer_{player_settings.workflow_name}"
-        self._player_settings = player_settings
+        self._designer = designer
+        self.key = f"__WorkflowPlayer_{designer.get_workflow_name()}"
        self._boundaries = boundaries
        self.commands = WorkflowPlayerCommandManager(self)
        self._datagrid = InstanceManager.get(self._session,
@@ -39,31 +50,171 @@ class WorkflowPlayer(BaseComponent):
                                             key=self.key,
                                             grid_settings=grid_settings,
                                             boundaries=boundaries)
        self.runtime_states = {}
        self.global_error = None
        self.has_error = False

    def set_boundaries(self, boundaries: dict):
        self._datagrid.set_boundaries(boundaries)

    def get_component_runtime_state(self, component_id: str):
        # return a default value if the player hasn't been played yet
        return self.runtime_states.get(component_id, WorkflowComponentRuntimeState(component_id))

    def run(self):
        engine = WorkflowEngine()
        for component in self._player_settings.components:
            if component.type == ProcessorTypes.Producer and component.properties["processor_name"] == "Repository":
                engine.add_processor(
                    TableDataProducer(self._session, self._settings_manager, component.properties["repository"],
                                      component.properties["table"]))
            elif component.type == ProcessorTypes.Filter and component.properties["processor_name"] == "Default":
                engine.add_processor(DefaultDataFilter(component.properties["filter"]))
            elif component.type == ProcessorTypes.Presenter and component.properties["processor_name"] == "Default":
                engine.add_processor(DefaultDataPresenter(component.properties["columns"]))
        # at least one connection is required to play
        if len(self._designer.get_workflow_connections()) == 0:
            self.global_error = "No connections defined."
            return

        self._init_state(ComponentState.NOT_RUN)

        try:
            sorted_components = self._get_sorted_components()
            engine = self._get_engine(sorted_components)

        except ValueError as e:
            # Handle workflow structure errors (e.g., cycles)
            self.has_error = True
            self.global_error = f"Workflow configuration error: {e}"
            return
        except WorkflowsPlayerError as ex:
            self.has_error = True
            self.global_error = self._get_global_error_as_str(ex, "Failed to init ")
            if ex.component_id in self.runtime_states:
                self.runtime_states[ex.component_id].state = ComponentState.FAILURE
                self.runtime_states[ex.component_id].error_message = str(ex.error)
            return

        res = engine.run_to_list()

        if engine.has_error and not engine.errors:
            self.has_error = True
            self.global_error = engine.global_error

        else:  # loop through the components and update the runtime states
            for component in sorted_components:
                runtime_state = self.runtime_states.get(component.id)

                if component.id not in engine.errors:
                    runtime_state.state = ComponentState.SUCCESS
                    continue

                # the component failed
                error = engine.errors[component.id]
                runtime_state.state = ComponentState.FAILURE
                runtime_state.error_message = str(error)
                self.global_error = self._get_global_error_as_str(error, "Error in ")  # update global error as well
                self.has_error = True
                break  # the remaining components will remain as NOT_RUN

        data = [row.as_dict() for row in res]
        df = pd.DataFrame(data)
        self._datagrid.init_from_dataframe(df)

    def stop(self):
        self._init_state()

    def get_dataframe(self):
        return self._datagrid.get_dataframe()

    def __ft__(self):
        return Div(
            self._datagrid,
            id=self._id,
        )

    def _get_sorted_components(self) -> list[WorkflowComponent]:
        """
        Sorts the workflow components based on their connections using topological sort.

        - A connection from component A to B means A must come before B.
        - Raises a ValueError if a cycle is detected.
        - Raises a ValueError if a connection references a non-existent component.
        - Ignores components that are not part of any connection.

        :return: A list of sorted WorkflowComponent objects.
        """
        components_by_id = {c.id: c for c in self._designer.get_workflow_components()}

        # Get all component IDs involved in connections
        involved_ids = set()
        for conn in self._designer.get_workflow_connections():
            involved_ids.add(conn.from_id)
            involved_ids.add(conn.to_id)

        # Check if all involved components exist
        for component_id in involved_ids:
            if component_id not in components_by_id:
                raise ValueError(f"Component with ID '{component_id}' referenced in connections but does not exist.")

        # Build the graph (adjacency list and in-degrees) for involved components
        adj = {cid: [] for cid in involved_ids}
        in_degree = {cid: 0 for cid in involved_ids}

        for conn in self._designer.get_workflow_connections():
            # from_id -> to_id
            adj[conn.from_id].append(conn.to_id)
            in_degree[conn.to_id] += 1

        # Find all sources (nodes with in-degree 0)
        queue = deque([cid for cid in involved_ids if in_degree[cid] == 0])

        sorted_order = []
        while queue:
            u = queue.popleft()
            sorted_order.append(u)

            for v in adj.get(u, []):
                in_degree[v] -= 1
                if in_degree[v] == 0:
                    queue.append(v)

        # Check for cycles
        if len(sorted_order) != len(involved_ids):
            raise ValueError("A cycle was detected in the workflow connections.")

        # Return sorted components
        return [components_by_id[cid] for cid in sorted_order]

    def _get_engine(self, sorted_components):
        # the components are already reordered according to the connection definitions
        engine = WorkflowEngine()
        for component in sorted_components:
            key = (component.type, component.properties["processor_name"])
            try:
                if key == (ProcessorTypes.Producer, "Repository"):
                    engine.add_processor(
                        TableDataProducer(self._session,
                                          self._settings_manager,
                                          component.id,
                                          component.properties["repository"],
                                          component.properties["table"]))
                elif key == (ProcessorTypes.Producer, "Jira"):
                    engine.add_processor(
                        JiraDataProducer(self._session,
                                         self._settings_manager,
                                         component.id,
                                         'issues',
                                         component.properties["jira_jql"]))
                elif key == (ProcessorTypes.Filter, "Default"):
                    engine.add_processor(DefaultDataFilter(component.id, component.properties["filter"]))
                elif key == (ProcessorTypes.Presenter, "Default"):
                    engine.add_processor(DefaultDataPresenter(component.id, component.properties["columns"]))
                else:
                    raise ValueError(
                        f"Unsupported processor : type={component.type}, name={component.properties['processor_name']}")
            except Exception as e:
                raise WorkflowsPlayerError(component.id, e)

        return engine

    def _init_state(self, state: ComponentState = ComponentState.SUCCESS):
        self.global_error = None
        self.has_error = False
        self.runtime_states = {component.id: WorkflowComponentRuntimeState(component.id, state)
                               for component in self._designer.get_workflow_components()}

    @staticmethod
    def create_component_id(session, suffix=None):
        prefix = f"{WORKFLOW_PLAYER_INSTANCE_ID}{session['user_id']}"
@@ -71,3 +222,10 @@ class WorkflowPlayer(BaseComponent):
            suffix = get_unique_id()

        return make_safe_id(f"{prefix}{suffix}")

    @staticmethod
    def _get_global_error_as_str(error, prefix=""):
        if hasattr(error, "component_id"):
            return f"{prefix}component '{error.component_id}': {error.error}"
        else:
            return str(error)
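A stand-alone sketch of the ordering rule `_get_sorted_components` implements above (Kahn's algorithm); component ids and edges are made up:

```python
# Stand-alone sketch of the ordering rule in _get_sorted_components
# (Kahn's algorithm). Component ids and edges here are hypothetical.
from collections import deque

edges = [("producer", "filter"), ("filter", "presenter")]  # from_id -> to_id

nodes = {n for edge in edges for n in edge}
adj = {n: [] for n in nodes}
in_degree = {n: 0 for n in nodes}
for src, dst in edges:
    adj[src].append(dst)
    in_degree[dst] += 1

queue = deque(n for n in nodes if in_degree[n] == 0)  # all sources
order = []
while queue:
    u = queue.popleft()
    order.append(u)
    for v in adj[u]:
        in_degree[v] -= 1
        if in_degree[v] == 0:
            queue.append(v)

assert order == ["producer", "filter", "presenter"]
# len(order) != len(nodes) would mean a cycle, which run() reports as a configuration error.
```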
@@ -1,3 +1,4 @@
import enum
import logging
from dataclasses import dataclass, field

@@ -8,6 +9,15 @@ from core.settings_management import SettingsManager
logger = logging.getLogger("WorkflowsSettings")


class ComponentState(enum.Enum):
    """
    Represents the execution state of a workflow component.
    """
    SUCCESS = "success"
    FAILURE = "failure"
    NOT_RUN = "not_run"


# Data structures
@dataclass
class WorkflowComponent:
@@ -27,6 +37,16 @@ class Connection:
    to_id: str


@dataclass
class WorkflowComponentRuntimeState:
    """
    Represents the runtime state of a single workflow component.
    """
    id: str
    state: ComponentState = ComponentState.SUCCESS
    error_message: str | None = None


@dataclass
class WorkflowsDesignerSettings:
    workflow_name: str = "No Name"
@@ -41,12 +61,6 @@ class WorkflowsDesignerState:
    selected_component_id = None


-@dataclass
-class WorkflowsPlayerSettings:
-    workflow_name: str = "No Name"
-    components: list[WorkflowComponent] = None


@dataclass
class WorkflowsSettings:
    workflows: list[str] = field(default_factory=list)
src/core/jira.py (new file, 225 lines)
@@ -0,0 +1,225 @@
import json
import logging

import requests
from requests.auth import HTTPBasicAuth

from core.Expando import Expando

JIRA_ROOT = "https://altares.atlassian.net/rest/api/2"
DEFAULT_HEADERS = {"Accept": "application/json"}

logger = logging.getLogger("jql")


class NotFound(Exception):
    pass


class Jira:
    """Manage default operations on JIRA"""

    def __init__(self, user_name: str, api_token: str):
        """
        Prepare a connection to JIRA.
        The initialisation does not do anything;
        it only stores the user_name and the api_token.
        Note that user_name plus api_token is the recommended way to connect,
        and therefore the only one supported here.
        :param user_name:
        :param api_token:
        """
        self.user_name = user_name
        self.api_token = api_token
        self.auth = HTTPBasicAuth(self.user_name, self.api_token)

    def issue(self, issue_id: str) -> Expando:
        """
        Retrieve an issue
        :param issue_id:
        :return:
        """
        url = f"{JIRA_ROOT}/issue/{issue_id}"

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        return Expando(json.loads(response.text))

    def fields(self) -> list[Expando]:
        """
        Retrieve the list of all fields for an issue
        :return:
        """
        url = f"{JIRA_ROOT}/field"

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        as_dict = json.loads(response.text)
        return [Expando(field) for field in as_dict]

    def jql(self, jql: str, fields="summary,status,assignee") -> list[Expando]:
        """
        Executes a JQL query and returns the list of issues
        :param jql:
        :param fields: list of fields to retrieve
        :return:
        """
        logger.debug(f"Processing jql '{jql}'")
        url = f"{JIRA_ROOT}/search"

        headers = DEFAULT_HEADERS.copy()
        headers["Content-Type"] = "application/json"

        payload = {
            "fields": [f.strip() for f in fields.split(",")],
            "fieldsByKeys": False,
            "jql": jql,
            "maxResults": 500,  # Does not seem to be used. It's always 100!
            "startAt": 0
        }

        result = []
        while True:
            logger.debug(f"Request startAt '{payload['startAt']}'")
            response = requests.request(
                "POST",
                url,
                data=json.dumps(payload),
                headers=headers,
                auth=self.auth
            )

            if response.status_code != 200:
                raise Exception(self._format_error(response))

            as_dict = json.loads(response.text)
            result += as_dict["issues"]

            if as_dict["startAt"] + as_dict["maxResults"] >= as_dict["total"]:
                # We retrieved at least the total number of items
                break

            payload["startAt"] += as_dict["maxResults"]

        return [Expando(issue) for issue in result]

    def extract(self, jql, mappings, updates=None) -> list[dict]:
        """
        Executes a jql and yields rows as dicts.
        The <code>issue</code> object returned by the <ref>jql</ref> method
        contains all the fields from Jira; they are not all necessary.
        This method selects the required fields.
        :param jql:
        :param mappings:
        :param updates: List of updates (lambdas on issue) to perform
        :return:
        """
        logger.debug(f"Processing extract using mapping {mappings}")

        def _get_field(mapping):
            """Returns the meaningful jira field, for the mapping description path"""
            fields = mapping.split(".")
            return fields[1] if len(fields) > 1 and fields[0] == "fields" else fields[0]

        # retrieve the list of requested fields from what was asked in the mapping
        jira_fields = [_get_field(mapping) for mapping in mappings]
        as_string = ", ".join(jira_fields)
        issues = self.jql(jql, as_string)

        for issue in issues:
            # apply updates if needed
            if updates:
                for update in updates:
                    update(issue)

            row = {cvs_col: issue.get(jira_path) for jira_path, cvs_col in mappings.items() if cvs_col is not None}
            yield row

    def get_versions(self, project_key):
        """
        Given a project key,
        returns the list of versions defined in JIRA
        :param project_key:
        :return:
        """

        url = f"{JIRA_ROOT}/project/{project_key}/versions"

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        if response.status_code != 200:
            raise NotFound()

        as_list = json.loads(response.text)
        return [Expando(version) for version in as_list]

    def get_version(self, project_key, version_name):
        """
        Given a project key and a version name,
        returns the fixVersion entry in JIRA
        :param project_key:
        :param version_name:
        :return:
        """

        for version in self.get_versions(project_key):
            if version.name == version_name:
                return version

        raise NotFound()

    def get_all_fields(self):
        """
        Helper function that returns the list of all fields that can be requested in an issue
        :return:
        """
        url = f"{JIRA_ROOT}/field"
        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        as_dict = json.loads(response.text)
        return [Expando(issue) for issue in as_dict]

    @staticmethod
    def update_customer_refs(issue: Expando, bug_only=True, link_name=None):
        issue["ticket_customer_refs"] = []
        if bug_only and issue.fields.issuetype.name != "Bug":
            return

        for issue_link in issue.fields.issuelinks:  # [i_link for i_link in issue.fields.issuelinks if i_link["type"]["name"] == "Relates"]:
            if link_name and issue_link["type"]["name"] not in link_name:
                continue

            direction = "inwardIssue" if "inwardIssue" in issue_link else "outwardIssue"
            related_issue_key = issue_link[direction]["key"]
            if related_issue_key.startswith("ITSUP"):
                issue.ticket_customer_refs.append(related_issue_key)
                continue

    @staticmethod
    def _format_error(response):
        if "errorMessages" in response.text:
            error_messages = json.loads(response.text)["errorMessages"]
        else:
            error_messages = response.text
        return f"Error {response.status_code} : {response.reason} : {error_messages}"
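A minimal usage sketch of this client; the e-mail, token and JQL string are placeholders, and `jql()` follows the `startAt` pagination transparently:

```python
# Usage sketch for the Jira client above (credentials and JQL are placeholders).
from core.jira import Jira

jira = Jira("user@example.com", "<api-token>")
issues = jira.jql("project = ABC AND statusCategory != Done", fields="summary,status")
for issue in issues:
    print(issue.key, issue.fields.summary)  # Expando exposes the JSON as attributes
```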
src/workflow/DefaultDataPresenter.py (new file, 103 lines)
@@ -0,0 +1,103 @@
from typing import Any

from core.Expando import Expando
from workflow.engine import DataPresenter


class DefaultDataPresenter(DataPresenter):
    """Default data presenter that applies a column-mappings definition to each row."""

    def __init__(self, component_id: str, mappings_definition: str):
        super().__init__(component_id)
        self._mappings_definition = mappings_definition
        self._split_definitions = [definition.strip() for definition in mappings_definition.split(",")]

        if "*" not in mappings_definition:
            self._static_mappings = self._get_static_mappings()
        else:
            self._static_mappings = None

    def present(self, data: Any) -> Any:
        self._validate_mappings_definition()

        if self._static_mappings:
            return Expando(data.to_dict(self._static_mappings))

        dynamic_mappings = self._get_dynamic_mappings(data)
        return Expando(data.to_dict(dynamic_mappings))

    def _get_dynamic_mappings(self, data):

        manage_conflicts = {}

        mappings = {}
        for mapping in self._split_definitions:
            if "=" in mapping:
                key, value = [s.strip() for s in mapping.split('=', 1)]
                if key == "*":
                    # all fields
                    if value != "*":
                        raise ValueError("Only '*' is accepted when renaming a wildcard.")
                    for key in data.as_dict().keys():
                        if key in manage_conflicts:
                            raise ValueError(f"Collision detected for field '{key}'. It is mapped from both '{manage_conflicts[key]}' and '{mapping}'.")
                        manage_conflicts[key] = mapping
                        mappings[key] = key
                elif key.endswith(".*"):
                    # all fields in a sub-object
                    if value != "*":
                        raise ValueError("Only '*' is accepted when renaming a wildcard.")
                    obj_path = key[:-2]
                    sub_obj = data.get(obj_path)
                    if isinstance(sub_obj, dict):
                        for sub_field in sub_obj:
                            if sub_field in manage_conflicts:
                                raise ValueError(
                                    f"Collision detected for field '{sub_field}'. It is mapped from both '{manage_conflicts[sub_field]}' and '{mapping}'.")
                            manage_conflicts[sub_field] = mapping
                            mappings[f"{obj_path}.{sub_field}"] = sub_field
                    else:
                        raise ValueError(f"Field '{obj_path}' is not an object.")
                else:
                    mappings[key.strip()] = value.strip()

            else:
                if mapping == "*":
                    # all fields
                    for key in data.as_dict().keys():
                        mappings[key] = key
                elif mapping.endswith(".*"):
                    # all fields in a sub-object
                    obj_path = mapping[:-2]
                    sub_obj = data.get(obj_path)
                    if isinstance(sub_obj, dict):
                        for sub_field in sub_obj:
                            mappings[f"{obj_path}.{sub_field}"] = f"{obj_path}.{sub_field}"
                    else:
                        raise ValueError(f"Field '{obj_path}' is not an object.")
                else:
                    mappings[mapping] = mapping

        return mappings

    def _get_static_mappings(self):
        mappings = {}
        for mapping in self._split_definitions:
            if "=" in mapping:
                key, value = [s.strip() for s in mapping.split('=', 1)]
                mappings[key] = value
            else:
                mappings[mapping] = mapping

        return mappings

    def _validate_mappings_definition(self):
        last_char_was_comma = False
        for i, char in enumerate(self._mappings_definition):
            if char == ',':
                if last_char_was_comma:
                    raise ValueError(f"Invalid mappings definition: Error found at index {i}")
                last_char_was_comma = True
            elif not char.isspace():
                last_char_was_comma = False
@@ -2,14 +2,25 @@ import ast
from abc import ABC, abstractmethod
from typing import Any, Generator

from components.admin.admin_db_manager import AdminDbManager
from core.Expando import Expando
from core.jira import Jira
from core.utils import UnreferencedNamesVisitor
from utils.Datahelper import DataHelper


class DataProcessorError(Exception):
    def __init__(self, component_id, error):
        self.component_id = component_id
        self.error = error


class DataProcessor(ABC):
    """Base class for all data processing components."""

    def __init__(self, component_id: str = None):
        self.component_id = component_id

    @abstractmethod
    def process(self, data: Any) -> Generator[Any, None, None]:
        pass
@@ -24,7 +35,11 @@ class DataProducer(DataProcessor):
        pass

    def process(self, data: Any) -> Generator[Any, None, None]:
-        yield from self.emit(data)
+        try:
+            yield from self.emit(data)
+
+        except Exception as e:
+            raise DataProcessorError(self.component_id, e)


class DataFilter(DataProcessor):
@@ -36,8 +51,12 @@ class DataFilter(DataProcessor):
        pass

    def process(self, data: Any) -> Generator[Any, None, None]:
-        if self.filter(data):
-            yield data
+        try:
+            if self.filter(data):
+                yield data
+
+        except Exception as e:
+            raise DataProcessorError(self.component_id, e)


class DataPresenter(DataProcessor):
@@ -49,13 +68,18 @@ class DataPresenter(DataProcessor):
        pass

    def process(self, data: Any) -> Generator[Any, None, None]:
-        yield self.present(data)
+        try:
+            yield self.present(data)
+
+        except Exception as e:
+            raise DataProcessorError(self.component_id, e)


class TableDataProducer(DataProducer):
    """Base class for data producers that emit data from a repository."""

-    def __init__(self, session, settings_manager, repository_name, table_name):
+    def __init__(self, session, settings_manager, component_id, repository_name, table_name):
+        super().__init__(component_id)
        self._session = session
        self.settings_manager = settings_manager
        self.repository_name = repository_name
@@ -65,35 +89,25 @@ class TableDataProducer(DataProducer):
        yield from DataHelper.get(self._session, self.settings_manager, self.repository_name, self.table_name, Expando)


-class DefaultDataPresenter(DataPresenter):
-    """Default data presenter that returns the input data unchanged."""
+class JiraDataProducer(DataProducer):
+    """Base class for data producers that emit data from Jira."""

-    def __init__(self, columns_as_str: str):
-        super().__init__()
-        if not columns_as_str or columns_as_str == "*":
-            self.mappings = None
-
-        else:
-            self.mappings = {}
-            temp_mappings = [col.strip() for col in columns_as_str.split(",")]
-            for mapping in temp_mappings:
-                if "=" in mapping:
-                    key, value = mapping.split("=")
-                    self.mappings[key] = value
-                else:
-                    self.mappings[mapping] = mapping
+    def __init__(self, session, settings_manager, component_id, jira_object='issues', jira_query=''):
+        super().__init__(component_id)
+        self._session = session
+        self.settings_manager = settings_manager
+        self.jira_object = jira_object
+        self.jira_query = jira_query
+        self.db = AdminDbManager(session, settings_manager).jira

-    def present(self, data: Any) -> Any:
-        if self.mappings is None:
-            return data
-
-        return Expando(data.to_dict(self.mappings))
+    def emit(self, data: Any = None) -> Generator[Any, None, None]:
+        jira = Jira(self.db.user_name, self.db.api_token)
+        yield from jira.jql(self.jira_query)


class DefaultDataFilter(DataFilter):
-    def __init__(self, filter_expression: str):
-        super().__init__()
+    def __init__(self, component_id: str, filter_expression: str):
+        super().__init__(component_id)
        self.filter_expression = filter_expression
        self._ast_tree = ast.parse(filter_expression, "<user input>", 'eval')
        self._compiled = compile(self._ast_tree, "<string>", "eval")
@@ -112,6 +126,9 @@ class WorkflowEngine:

    def __init__(self):
        self.processors: list[DataProcessor] = []
        self.has_error = False
        self.global_error = None
        self.errors = {}

    def add_processor(self, processor: DataProcessor) -> 'WorkflowEngine':
        """Add a data processor to the pipeline."""
@@ -137,14 +154,18 @@ class WorkflowEngine:
        The first processor must be a DataProducer.
        """
        if not self.processors:
-            raise ValueError("No processors in the pipeline")
+            self.has_error = False
+            self.global_error = "No processors in the pipeline"
+            raise ValueError(self.global_error)

        first_processor = self.processors[0]

        if not isinstance(first_processor, DataProducer):
-            raise ValueError("First processor must be a DataProducer")
+            self.has_error = False
+            self.global_error = "First processor must be a DataProducer"
+            raise ValueError(self.global_error)

-        for item in first_processor.emit():
+        for item in first_processor.process(None):
            yield from self._process_single_item(item, 1)

    def run_to_list(self) -> list[Any]:
@@ -152,4 +173,13 @@ class WorkflowEngine:
        Run the workflow and return all results as a list.
        Use this method when you need all results at once.
        """
-        return list(self.run())
+        try:
+            return list(self.run())
+        except DataProcessorError as err:
+            self.has_error = True
+            self.errors[err.component_id] = err.error
+            return []
+        except Exception as err:
+            self.has_error = True
+            self.global_error = str(err)
+            return []
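A minimal sketch of the engine's new error contract: `run_to_list()` no longer raises but records per-component errors, which the player maps to `ComponentState.FAILURE`. Here `session`, `settings_manager` and the repository/filter values are placeholders:

```python
# Sketch of the error contract introduced above. Processor wiring follows
# _get_engine in WorkflowPlayer; all argument values are placeholders.
engine = WorkflowEngine()
engine.add_processor(TableDataProducer(session, settings_manager, "c1", "repo", "table"))
engine.add_processor(DefaultDataFilter("c2", "status == 'Done'"))

rows = engine.run_to_list()  # returns [] instead of raising on failure
if engine.has_error:
    for component_id, error in engine.errors.items():
        print(f"{component_id} failed: {error}")  # shown as FAILURE in the designer
```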
tests/my_mocks.py (new file, 34 lines)
@@ -0,0 +1,34 @@
from unittest.mock import MagicMock

import pytest
from fasthtml.components import *

from components.tabs.components.MyTabs import MyTabs


@pytest.fixture
def tabs_manager():
    class MockTabsManager(MagicMock):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, spec=MyTabs, **kwargs)
            self.request_new_tab_id = MagicMock(side_effect=["new_tab_id", "new_tab_2", "new_tab_3", StopIteration])
            self.tabs = {}
            self.tabs_by_key = {}

        def add_tab(self, title, content, key: str | tuple = None, tab_id: str = None, icon=None):
            self.tabs[tab_id] = (title, content)
            self.tabs_by_key[key] = (title, content)

        def set_tab_content(self, tab_id, content, title=None, key: str | tuple = None, active=None):
            self.tabs[tab_id] = (title, content)
            self.tabs_by_key[key] = (title, content)

        def refresh(self):
            return Div(
                Div(
                    [Div(title) for title in self.tabs.keys()]
                ),
                list(self.tabs.values())[-1]
            )

    return MockTabsManager()
186
tests/test_default_data_presenter.py
Normal file
186
tests/test_default_data_presenter.py
Normal file
@@ -0,0 +1,186 @@
|
||||
import pytest
|
||||
|
||||
from core.Expando import Expando
|
||||
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||
|
||||
|
||||
def test_i_can_present_static_mappings():
|
||||
mappings_def = "field1 = renamed_1 , field2 "
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert actual == Expando({"renamed_1": "value1", "field2": "value2"}) # field3 is removed
|
||||
|
||||
|
||||
def test_the_latter_mappings_take_precedence():
|
||||
mappings_def = "field1 = renamed_1 , field1 "
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert actual == Expando({"field1": "value1"}) # field3 is removed
|
||||
|
||||
|
||||
def test_i_can_present_static_mappings_with_sub_fields():
|
||||
mappings_def = "root.field1 = renamed_1 , root.field2, root.sub_field.field3, root.sub_field.field4=renamed4 "
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == {"renamed_1": "value1",
|
||||
"root.field2": "value2",
|
||||
"root.sub_field.field3": "value3",
|
||||
"renamed4": "value4"}
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings():
|
||||
mappings_def = "*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert actual == Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_for_complex_data():
|
||||
mappings_def = "*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}
|
||||
},
|
||||
"field5": "value5"}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == as_dict
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_with_sub_fields():
|
||||
mappings_def = "root.sub_field.*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == {"root.sub_field.field3": "value3",
|
||||
"root.sub_field.field4": "value4"}
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_with_sub_fields_and_renames():
|
||||
mappings_def = "root.sub_field.*=*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == {"field3": "value3",
|
||||
"field4": "value4"}
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_and_rename_them():
|
||||
mappings_def = "*=*" # does not really have effects as '*' only goes down one level
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root1": {"field1": "value1",
|
||||
"field2": "value2"},
|
||||
"root2": {"field3": "value3",
|
||||
"field4": "value4"}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == as_dict
|
||||
|
||||
|
||||
def test_i_can_present_static_and_dynamic_mappings():
|
||||
mappings_def = "root.field1 = renamed_1, root.sub_field.*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert isinstance(actual, Expando)
|
||||
assert actual.as_dict() == {"renamed_1": "value1",
|
||||
"root.sub_field.field3": "value3",
|
||||
"root.sub_field.field4": "value4"}
|
||||
|
||||
|
||||
def test_another_example_of_static_and_dynamic_mappings():
|
||||
mappings_def = "* , field1 = renamed_1"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
actual = presenter.present(data)
|
||||
|
||||
assert actual == Expando({"renamed_1": "value1", "field2": "value2", "field3": "value3"}) # field3 is removed
|
||||
|
||||
|
||||
def test_i_can_detect_conflict_when_dynamically_renaming_a_field():
|
||||
mappings_def = "root_1.*=*, root_2.*=*"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root_1": {"field1": "value1",
|
||||
"field2": "value2"},
|
||||
"root_2": {"field1": "value1",
|
||||
"field2": "value2"}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
with pytest.raises(ValueError) as e:
|
||||
presenter.present(data)
|
||||
|
||||
assert str(e.value) == "Collision detected for field 'field1'. It is mapped from both 'root_1.*=*' and 'root_2.*=*'."
|
||||
|
||||
|
||||
def test_i_can_detect_declaration_error():
|
||||
mappings_def = "field1 ,, field2"
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
with pytest.raises(ValueError) as e:
|
||||
presenter.present(data)
|
||||
|
||||
|
||||
def test_i_can_detect_dynamic_error_declaration():
|
||||
mappings_def = "root.field1.*" # field1 is not an object
|
||||
presenter = DefaultDataPresenter("comp_id", mappings_def)
|
||||
as_dict = {"root": {"field1": "value1",
|
||||
"field2": "value2",
|
||||
"sub_field": {"field3": "value3",
|
||||
"field4": "value4"
|
||||
}}}
|
||||
data = Expando(as_dict)
|
||||
|
||||
with pytest.raises(ValueError) as e:
|
||||
presenter.present(data)
|
||||
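Taken together, these tests pin down the presenter's mappings DSL: comma-separated entries, 'source = target' renames, 'prefix.*' to expand one level of a nested object under dotted names, 'prefix.*=*' to expand while stripping the prefix, with empty entries and cross-prefix name collisions rejected. As a reading aid, here is a minimal, self-contained sketch of that resolution over plain dicts. The names 'flatten' and 'resolve_mappings' are hypothetical, and the sketch deliberately omits three behaviours the real DefaultDataPresenter shows in the tests: the 'latter mapping wins' override, the structure-preserving bare '*', and the error raised when a wildcard targets a non-object.

# A hedged sketch, not the project's implementation: wildcard expansion and
# collision detection for dotted-path mappings over plain dicts.
def flatten(data, prefix=""):
    # {"a": {"b": 1}} -> {"a.b": 1}
    flat = {}
    for key, value in data.items():
        path = f"{prefix}{key}"
        if isinstance(value, dict):
            flat.update(flatten(value, path + "."))
        else:
            flat[path] = value
    return flat


def resolve_mappings(mappings_def, data):
    flat = flatten(data)
    out, sources = {}, {}
    for raw in mappings_def.split(","):
        token = raw.strip()
        if not token:
            raise ValueError(f"Empty mapping in '{mappings_def}'")
        source, _, target = (part.strip() for part in token.partition("="))
        if source.endswith("*"):
            prefix = source[:-1]  # 'root.sub_field.' for 'root.sub_field.*'
            for path, value in flat.items():
                if not path.startswith(prefix):
                    continue
                name = path[len(prefix):] if target == "*" else path
                if name in out and sources.get(name) != token:
                    raise ValueError(f"Collision detected for field '{name}'. It is "
                                     f"mapped from both '{sources[name]}' and '{token}'.")
                out[name], sources[name] = value, token
        else:
            out[target or source] = flat[source]  # unknown sources raise KeyError here
    return out


assert (resolve_mappings("field1 = renamed_1 , field2",
                         {"field1": "v1", "field2": "v2", "field3": "v3"})
        == {"renamed_1": "v1", "field2": "v2"})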
@@ -11,7 +11,7 @@ def sample_structure():
    """
    A pytest fixture to provide a sample tree structure for testing.
    """
-    return Html(
+    return Div(
        Header(cls="first-class"),
        Body(
            "hello world",
@@ -26,13 +26,13 @@ def sample_structure():

@pytest.mark.parametrize("value, expected, expected_error", [
    (Div(), "value",
-     "The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=div((),{})\nexpected=value."),
+     "The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<div></div>\nexpected=value."),
    (Div(), A(),
     "The elements are different: 'div' != 'a'."),
    (Div(Div()), Div(A()),
     "Path 'div':\n\tThe elements are different: 'div' != 'a'."),
    (Div(A(Span())), Div(A("element")),
-     "Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=span((),{})\nexpected=element."),
+     "Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<span></span>\nexpected=element."),
    (Div(attr="one"), Div(attr="two"),
     "Path 'div':\n\tThe values are different for 'attr' : 'one' != 'two'."),
    (Div(A(attr="alpha")), Div(A(attr="beta")),
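The two updated messages in this table track a formatting change in the comparison helper: a mismatching node is now rendered as XML ('<div></div>') rather than as the FT repr ('div((),{})'). A plausible way to produce such a string, assuming fastcore's to_xml (a hypothetical 'describe' helper, and exact whitespace may vary between versions):

# Hypothetical mismatch formatter; assumes fastcore.xml's FT and to_xml.
from fastcore.xml import FT, to_xml

def describe(value):
    # Render FT nodes as XML for readable assertion messages; other values as str.
    return to_xml(value).strip() if isinstance(value, FT) else str(value)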
@@ -8,14 +8,16 @@ from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, Connection
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, Contains
+from my_mocks import tabs_manager

TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"


@pytest.fixture
-def designer(session):
+def designer(session, tabs_manager):
    return WorkflowDesigner(session=session, _id=TEST_WORKFLOW_DESIGNER_ID,
                            settings_manager=SettingsManager(engine=MemoryDbEngine()),
+                           tabs_manager=tabs_manager,
                            key=TEST_WORKFLOW_DESIGNER_ID,
                            designer_settings=WorkflowsDesignerSettings("Workflow Name"),
                            boundaries={"height": 500, "width": 800}
@@ -71,6 +73,7 @@ def test_i_can_render_no_component(designer):
    expected = Div(
        H1("Workflow Name"),
        P("Drag components from the toolbox to the canvas to create your workflow."),
        Div(id=f"t_{designer.get_id()}"),  # media + error message
        Div(id=f"d_{designer.get_id()}"),  # designer container
+       Div(cls="wkf-splitter"),
        Div(id=f"p_{designer.get_id()}"),  # properties panel
@@ -83,7 +86,7 @@ def test_i_can_render_no_component(designer):

def test_i_can_render_a_producer(designer, producer_component):
    component = producer_component
-    actual = designer._mk_workflow_component(component)
+    actual = designer._mk_component(component)
    expected = Div(
        # input connection point
        Div(cls="wkf-connection-point wkf-input-point",
@@ -2,6 +2,8 @@ from unittest.mock import MagicMock

import pytest

from core.Expando import Expando
+from workflow.DefaultDataPresenter import DefaultDataPresenter
from workflow.engine import WorkflowEngine, DataProcessor, DataProducer, DataFilter, DataPresenter


@@ -11,6 +13,24 @@ def engine():
    return WorkflowEngine()


+@pytest.fixture
+def presenter_sample_data():
+    return Expando({
+        "id": 123,
+        "title": "My Awesome Task",
+        "creator": {
+            "id": 1,
+            "name": "John Doe",
+            "email": "john.doe@example.com"
+        },
+        "assignee": {
+            "id": 2,
+            "name": "Jane Smith",
+            "email": "jane.smith@example.com"
+        }
+    })


def test_empty_workflow_initialization(engine):
    """Test that a new WorkflowEngine has no processors."""
    assert len(engine.processors) == 0
@@ -124,3 +144,21 @@ def test_branching_workflow(engine):

    result = engine.run_to_list()
    assert result == [1, 10, 2, 20]


+def test_presenter_i_can_use_wildcards(presenter_sample_data):
+    presenter1 = DefaultDataPresenter("component_id", "id, creator.*")
+    res = presenter1.present(presenter_sample_data).as_dict()
+    assert res == {"id": 123, "creator.id": 1, "creator.name": "John Doe", "creator.email": "john.doe@example.com"}


+def test_presenter_i_can_rename_wildcard_with_specific_override(presenter_sample_data):
+    presenter1 = DefaultDataPresenter("component_id", "creator.*=*, creator.name=author_name")
+    res = presenter1.present(presenter_sample_data).as_dict()
+    assert res == {"id": 1, "email": "john.doe@example.com", "author_name": "John Doe"}


+def test_presenter_i_can_manage_collisions(presenter_sample_data):
+    presenter1 = DefaultDataPresenter("component_id", "creator.*=*, assignee.*=*")
+    with pytest.raises(ValueError, match="Collision detected for field"):
+        presenter1.present(presenter_sample_data).as_dict()
216
tests/test_workflow_player.py
Normal file
@@ -0,0 +1,216 @@
from unittest.mock import MagicMock

import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from components.workflows.components.WorkflowDesigner import COMPONENT_TYPES, WorkflowDesigner
from components.workflows.components.WorkflowPlayer import WorkflowPlayer, WorkflowsPlayerError
from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowComponent, Connection, ComponentState, WorkflowsDesignerSettings
from core.settings_management import SettingsManager, MemoryDbEngine
from my_mocks import tabs_manager
from workflow.engine import DataProcessorError

TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
TEST_WORKFLOW_PLAYER_ID = "workflow_player_id"


@pytest.fixture
def settings_manager():
    return SettingsManager(MemoryDbEngine())


@pytest.fixture
def designer(session, settings_manager, tabs_manager):
    components = [
        WorkflowComponent(
            "comp_producer",
            ProcessorTypes.Producer,
            10, 100,
            COMPONENT_TYPES[ProcessorTypes.Producer]["title"],
            COMPONENT_TYPES[ProcessorTypes.Producer]["description"],
            {"processor_name": "Repository"}
        ),
        WorkflowComponent(
            "comp_filter",
            ProcessorTypes.Filter,
            40, 100,
            COMPONENT_TYPES[ProcessorTypes.Filter]["title"],
            COMPONENT_TYPES[ProcessorTypes.Filter]["description"],
            {"processor_name": "Default"}
        ),
        WorkflowComponent(
            "comp_presenter",
            ProcessorTypes.Presenter,
            70, 100,
            COMPONENT_TYPES[ProcessorTypes.Presenter]["title"],
            COMPONENT_TYPES[ProcessorTypes.Presenter]["description"],
            {"processor_name": "Default"}
        )
    ]
    connections = [
        Connection("conn_1", "comp_producer", "comp_filter"),
        Connection("conn_2", "comp_filter", "comp_presenter"),
    ]

    designer = WorkflowDesigner(
        session,
        TEST_WORKFLOW_DESIGNER_ID,
        settings_manager,
        tabs_manager,
        "Workflow Designer",
        WorkflowsDesignerSettings(workflow_name="Test Workflow"),
        {"height": 500, "width": 800}
    )

    designer._state.components = {c.id: c for c in components}
    designer._state.connections = connections

    return designer


@pytest.fixture
def player(session, settings_manager, tabs_manager, designer):
    """
    Sets up a standard WorkflowPlayer instance with a 3-component linear workflow.
    A helper method 'get_dataframe' is attached for easier testing.
    """

    return WorkflowPlayer(session=session,
                          _id=TEST_WORKFLOW_PLAYER_ID,
                          settings_manager=settings_manager,
                          tabs_manager=tabs_manager,
                          designer=designer,
                          boundaries={"height": 500, "width": 800}
                          )


def test_run_successful_workflow(player, mocker):
    """
    Tests the "happy path" where the workflow runs successfully from start to finish.
    """
    # 1. Arrange: Mock a successful engine run
    mock_engine = MagicMock()
    mock_engine.has_error = False
    mock_result_data = [
        MagicMock(as_dict=lambda: {'col_a': 1, 'col_b': 'x'}),
        MagicMock(as_dict=lambda: {'col_a': 2, 'col_b': 'y'})
    ]
    mock_engine.run_to_list.return_value = mock_result_data
    mocker.patch.object(player, '_get_engine', return_value=mock_engine)

    # 2. Act
    player.run()

    # 3. Assert: Check for success state and correct data
    assert not player.has_error
    assert player.global_error is None
    for component_id, state in player.runtime_states.items():
        assert state.state == ComponentState.SUCCESS

    player._get_engine.assert_called_once()
    mock_engine.run_to_list.assert_called_once()

    expected_df = pd.DataFrame([row.as_dict() for row in mock_result_data])
    assert_frame_equal(player.get_dataframe(), expected_df)


def test_run_with_cyclical_dependency(player, mocker):
    """
    Tests that a workflow with a cycle is detected and handled before execution.
    """
    # 1. Arrange: Introduce a cycle and spy on engine creation
    player._designer._state.connections.append(Connection("conn_3", "comp_presenter", "comp_producer"))
    spy_get_engine = mocker.spy(player, '_get_engine')

    # 2. Act
    player.run()

    # 3. Assert: Check for the specific cycle error
    assert player.has_error
    assert "Workflow configuration error: A cycle was detected" in player.global_error
    spy_get_engine.assert_not_called()


def test_run_with_component_initialization_failure(player, mocker):
    """
    Tests that an error during a component's initialization is handled correctly.
    """
    # 1. Arrange: Make the engine creation fail for a specific component
    failing_component_id = "comp_filter"
    error = ValueError("Missing a required property")
    mocker.patch.object(player, '_get_engine', side_effect=WorkflowsPlayerError(failing_component_id, error))

    # 2. Act
    player.run()

    # 3. Assert: Check that the specific component is marked as failed
    assert player.has_error
    assert f"Failed to init component '{failing_component_id}'" in player.global_error
    assert player.runtime_states[failing_component_id].state == ComponentState.FAILURE
    assert str(error) in player.runtime_states[failing_component_id].error_message
    assert player.runtime_states["comp_producer"].state == ComponentState.NOT_RUN


def test_run_with_failure_in_middle_component(player, mocker):
    """
    Tests that a failure in a middle component updates all component states correctly.
    """
    # 1. Arrange: Mock an engine that fails at the filter component
    mock_engine = MagicMock()
    mock_engine.has_error = True
    failing_component_id = "comp_filter"
    error = RuntimeError("Data processing failed unexpectedly")
    mock_engine.errors = {failing_component_id: DataProcessorError(failing_component_id, error)}
    mock_engine.run_to_list.return_value = []
    mocker.patch.object(player, '_get_engine', return_value=mock_engine)

    # 2. Act
    player.run()

    # 3. Assert: Check the state of each component in the chain
    assert player.has_error
    assert f"Error in component '{failing_component_id}':" in player.global_error
    assert player.runtime_states["comp_producer"].state == ComponentState.SUCCESS
    assert player.runtime_states[failing_component_id].state == ComponentState.FAILURE
    assert str(error) in player.runtime_states[failing_component_id].error_message
    assert player.runtime_states["comp_presenter"].state == ComponentState.NOT_RUN


def test_run_with_empty_workflow(player, mocker):
    """
    Tests that running a workflow with no components completes without errors.
    """
    # 1. Arrange: Clear components and connections
    player._designer._state.components = {}
    player._designer._state.connections = []
    spy_get_engine = mocker.spy(player, '_get_engine')

    # 2. Act
    player.run()

    # 3. Assert: Ensure it finishes cleanly with no data
    assert not player.has_error
    assert player.global_error == 'No connections defined.'
    spy_get_engine.assert_not_called()


def test_run_with_global_engine_error(player, mocker):
    """
    Tests a scenario where the engine reports a global error not tied to a specific component.
    """
    # 1. Arrange: Mock a global engine failure
    mock_engine = MagicMock()
    mock_engine.has_error = True
    mock_engine.errors = {}  # No specific component error
    mock_engine.global_error = "A simulated global engine failure"
    mock_engine.run_to_list.return_value = []
    mocker.patch.object(player, '_get_engine', return_value=mock_engine)

    # 2. Act
    player.run()

    # 3. Assert: The player should report the global error from the engine
    assert player.has_error
    assert player.global_error == mock_engine.global_error
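One detail these tests fix precisely: test_run_with_cyclical_dependency requires the cycle to be caught before _get_engine is ever called. A self-contained sketch of such a pre-flight check over (source, target) connection pairs; 'has_cycle' is a hypothetical helper name, not the player's actual code:

# A hedged sketch of pre-flight cycle detection: iterative three-colour DFS
# over the directed graph built from the workflow's connections.
def has_cycle(connections):
    graph = {}
    for source, target in connections:
        graph.setdefault(source, []).append(target)
        graph.setdefault(target, [])

    WHITE, GRAY, BLACK = 0, 1, 2  # unvisited / on current path / done
    color = {node: WHITE for node in graph}

    def visit(node):
        color[node] = GRAY
        for neighbour in graph[node]:
            if color[neighbour] == GRAY:  # back edge -> cycle
                return True
            if color[neighbour] == WHITE and visit(neighbour):
                return True
        color[node] = BLACK
        return False

    return any(color[node] == WHITE and visit(node) for node in graph)


assert not has_cycle([("producer", "filter"), ("filter", "presenter")])
assert has_cycle([("producer", "filter"), ("filter", "presenter"),
                  ("presenter", "producer")])

Running this check on the connection graph first keeps engine construction, and any component initialisation side effects, out of the failure path.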
@@ -1,44 +1,15 @@
-from unittest.mock import MagicMock
-
import pytest
from fasthtml.components import *

from components.form.components.MyForm import FormField, MyForm
-from components.tabs.components.MyTabs import MyTabs
from components.workflows.components.Workflows import Workflows
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, div_icon, search_elements_by_name, Contains
+from my_mocks import tabs_manager

TEST_WORKFLOWS_ID = "testing_repositories_id"


-@pytest.fixture
-def tabs_manager():
-    class MockTabsManager(MagicMock):
-        def __init__(self, *args, **kwargs):
-            super().__init__(*args, spec=MyTabs, **kwargs)
-            self.request_new_tab_id = MagicMock(side_effect=["new_tab_id", "new_tab_2", "new_tab_3", StopIteration])
-            self.tabs = {}
-            self.tabs_by_key = {}
-
-        def add_tab(self, title, content, key: str | tuple = None, tab_id: str = None, icon=None):
-            self.tabs[tab_id] = (title, content)
-            self.tabs_by_key[key] = (title, content)
-
-        def set_tab_content(self, tab_id, content, title=None, key: str | tuple = None, active=None):
-            self.tabs[tab_id] = (title, content)
-            self.tabs_by_key[key] = (title, content)
-
-        def refresh(self):
-            return Div(
-                Div(
-                    [Div(title) for title in self.tabs.keys()]
-                ),
-                list(self.tabs.values())[-1]
-            )
-
-    return MockTabsManager()
+boundaries = {"height": 500, "width": 800}


@pytest.fixture
def workflows(session, tabs_manager):
@@ -117,7 +88,7 @@ def test_i_can_add_a_new_workflow(workflows, tabs_manager):
    res = workflows.request_new_workflow()
    tab_id = list(res.tabs.keys())[0]

-    actual = workflows.add_new_workflow(tab_id, "Not relevant here", "New Workflow", {})
+    actual = workflows.add_new_workflow(tab_id, "Not relevant here", "New Workflow", boundaries)

    expected = (
        Div(
@@ -134,11 +105,11 @@ def test_i_can_add_a_new_workflow(workflows, tabs_manager):


def test_i_can_select_a_workflow(workflows):
-    workflows.add_new_workflow("tab_id_1", "Not relevant", "workflow 1", {})
-    workflows.add_new_workflow("tab_id_2", "Not relevant", "workflow 2", {})
-    workflows.add_new_workflow("tab_id_3", "Not relevant", "workflow 3", {})
+    workflows.add_new_workflow("tab_id_1", "Not relevant", "workflow 1", boundaries)
+    workflows.add_new_workflow("tab_id_2", "Not relevant", "workflow 2", boundaries)
+    workflows.add_new_workflow("tab_id_3", "Not relevant", "workflow 3", boundaries)

-    actual = workflows.show_workflow("workflow 2", {})
+    actual = workflows.show_workflow("workflow 2", boundaries)

    expected = (
        Div(
@@ -150,4 +121,4 @@ def test_i_can_select_a_workflow(workflows):
            Div(),  # Workflow Designer embedded in the tab
    )

-    assert matches(actual, expected)
+    assert matches(actual, expected)