Compare commits
59 Commits
master
...
ResolvingP
| Author | SHA1 | Date | |
|---|---|---|---|
| 7dc7687b25 | |||
| f08ae4a90b | |||
| b48aaf4621 | |||
| 2c5fe004f5 | |||
| 9cf0e5e26a | |||
| 67abb45804 | |||
| 5820efb7f1 | |||
| 8eca1da3ca | |||
| 97a5989390 | |||
| e73709c859 | |||
| f0d98d23ff | |||
| 64e7c44a7d | |||
| 3a1870a160 | |||
| c2fcfbb2ab | |||
| e74639c042 | |||
| badc2e28b0 | |||
| 4ac3eb2dfa | |||
| 2bd998fe69 | |||
| c694f42c07 | |||
| 6949bb2814 | |||
| 14f079d5f9 | |||
| 3ca23449e4 | |||
| a6f765c624 | |||
| 43e7dd5f00 | |||
| 37c91d0d5d | |||
|
|
72f5f30da6 | ||
| fb82365980 | |||
| aa8aa8f58c | |||
| 1ceddfac7c | |||
|
|
34f959812b | ||
|
|
48b5c057f0 | ||
|
|
0d7b94a045 | ||
| e793aeda95 | |||
| a0cf5aff0c | |||
| d064a553dd | |||
| 6f17f6ee1f | |||
| ed793995fb | |||
| f3deeaefd1 | |||
| fdf05edec3 | |||
| bdd954b243 | |||
| 2754312141 | |||
| d0f7536fa0 | |||
| 2b288348e2 | |||
| 03ed1af7e6 | |||
| 8135e3d8af | |||
| e8fc972f98 | |||
| 14be07720f | |||
| e183584f52 | |||
| 60872a0aec | |||
| 9df32e3b5f | |||
| aed1022be3 | |||
| f86f4852c7 | |||
| 8e718ecb67 | |||
| 46c14ad3e8 | |||
| 797273e603 | |||
| d90613119f | |||
| f4e8f7a16c | |||
| 7f6a19813d | |||
| 4b06a0fe9b |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -11,6 +11,9 @@ tests/TestDBEngineRoot
|
||||
.sesskey
|
||||
tools.db
|
||||
.mytools_db
|
||||
.idea/MyManagingTools.iml
|
||||
.idea/misc.xml
|
||||
**/*.prof
|
||||
|
||||
# Created by .ignore support plugin (hsz.mobi)
|
||||
### Python template
|
||||
@@ -196,4 +199,4 @@ fabric.properties
|
||||
.idea/caches/build_file_checksums.ser
|
||||
|
||||
# idea folder, uncomment if you don't need it
|
||||
# .idea
|
||||
# .idea
|
||||
|
||||
11
.idea/MyManagingTools.iml
generated
11
.idea/MyManagingTools.iml
generated
@@ -1,11 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="PYTHON_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="Python 3.12 (MyManagingTools)" jdkType="Python SDK" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
5
.idea/codeStyles/codeStyleConfig.xml
generated
5
.idea/codeStyles/codeStyleConfig.xml
generated
@@ -1,5 +0,0 @@
|
||||
<component name="ProjectCodeStyleConfiguration">
|
||||
<state>
|
||||
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
|
||||
</state>
|
||||
</component>
|
||||
6
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
6
.idea/inspectionProfiles/Project_Default.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Project Default" />
|
||||
<inspection_tool class="PyInitNewSignatureInspection" enabled="false" level="WARNING" enabled_by_default="false" />
|
||||
</profile>
|
||||
</component>
|
||||
7
.idea/misc.xml
generated
7
.idea/misc.xml
generated
@@ -1,7 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Black">
|
||||
<option name="sdkName" value="Python 3.12 (MyManagingTools)" />
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (MyManagingTools)" project-jdk-type="Python SDK" />
|
||||
</project>
|
||||
2
.idea/vcs.xml
generated
2
.idea/vcs.xml
generated
@@ -1,6 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
||||
<mapping directory="" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
||||
4
Makefile
4
Makefile
@@ -18,7 +18,9 @@ clean:
|
||||
rm -rf Untitled*.ipynb
|
||||
rm -rf .ipynb_checkpoints
|
||||
rm -rf src/tools.db
|
||||
rm -rf src/*.out
|
||||
rm -rf src/*.prof
|
||||
find . -name '.sesskey' -exec rm -rf {} +
|
||||
find . -name '.pytest_cache' -exec rm -rf {} +
|
||||
find . -name '__pycache__' -exec rm -rf {} +
|
||||
find . -name 'debug.txt' -exec rm -rf {}
|
||||
find . -name 'debug.txt' -exec rm -rf {}
|
||||
|
||||
@@ -14,7 +14,7 @@ python main.py
|
||||
```shell
|
||||
docker-compose up -d
|
||||
```
|
||||
The application will be accessible on port 8000 (or whatever port you configured).
|
||||
The application will be accessible on port 8001 (if the docker compose file was not changed !).
|
||||
|
||||
2. **Initialize the Mistral model** (first run):
|
||||
```shell
|
||||
@@ -34,4 +34,11 @@ docker-compose down
|
||||
1. **Rebuild**:
|
||||
```shell
|
||||
docker-compose build
|
||||
```
|
||||
|
||||
# Profiling
|
||||
```shell
|
||||
cd src
|
||||
python -m cProfile -o profile.out main.py
|
||||
snakeviz profile.out # 'pip install snakeviz' if snakeviz is not installed
|
||||
```
|
||||
@@ -1,36 +1,59 @@
|
||||
annotated-types==0.7.0
|
||||
anyio==4.6.0
|
||||
apsw==3.50.2.0
|
||||
apswutils==0.1.0
|
||||
Arpeggio==2.0.2
|
||||
beautifulsoup4==4.12.3
|
||||
certifi==2024.8.30
|
||||
charset-normalizer==3.4.2
|
||||
click==8.1.7
|
||||
fastcore==1.7.8
|
||||
fastlite==0.0.11
|
||||
et-xmlfile==1.1.0
|
||||
fastcore==1.8.5
|
||||
fastlite==0.2.1
|
||||
h11==0.14.0
|
||||
httpcore==1.0.5
|
||||
httptools==0.6.1
|
||||
httpx==0.27.2
|
||||
httpx-sse==0.4.0
|
||||
idna==3.10
|
||||
iniconfig==2.0.0
|
||||
itsdangerous==2.2.0
|
||||
markdown-it-py==3.0.0
|
||||
mcp==1.9.2
|
||||
mdurl==0.1.2
|
||||
numpy==2.1.1
|
||||
oauthlib==3.2.2
|
||||
openpyxl==3.1.5
|
||||
packaging==24.1
|
||||
pandas==2.2.3
|
||||
pluggy==1.5.0
|
||||
pydantic==2.11.5
|
||||
pydantic-settings==2.9.1
|
||||
pydantic_core==2.33.2
|
||||
Pygments==2.19.1
|
||||
pytest==8.3.3
|
||||
pytest-mock==3.14.1
|
||||
python-dateutil==2.9.0.post0
|
||||
python-dotenv==1.0.1
|
||||
python-fasthtml==0.6.4
|
||||
python-fasthtml==0.12.21
|
||||
python-multipart==0.0.10
|
||||
pytz==2024.2
|
||||
PyYAML==6.0.2
|
||||
requests==2.32.3
|
||||
rich==14.0.0
|
||||
shellingham==1.5.4
|
||||
six==1.16.0
|
||||
sniffio==1.3.1
|
||||
soupsieve==2.6
|
||||
sqlite-minutils==3.37.0.post3
|
||||
sse-starlette==2.3.6
|
||||
starlette==0.38.5
|
||||
typer==0.16.0
|
||||
typing-inspection==0.4.1
|
||||
typing_extensions==4.13.2
|
||||
tzdata==2024.1
|
||||
urllib3==2.4.0
|
||||
uvicorn==0.30.6
|
||||
uvloop==0.20.0
|
||||
watchfiles==0.24.0
|
||||
websockets==13.1
|
||||
|
||||
pandas~=2.2.3
|
||||
numpy~=2.1.1
|
||||
requests~=2.32.3
|
||||
mcp~=1.9.2
|
||||
@@ -10,3 +10,15 @@ icon_dismiss_regular = NotStr(
|
||||
</g>
|
||||
</svg>"""
|
||||
)
|
||||
|
||||
# Fluent Add16Regular
|
||||
icon_add_regular = NotStr("""<svg name="add" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 16 16">
|
||||
<g fill="none">
|
||||
<path d="M8 2.5a.5.5 0 0 0-1 0V7H2.5a.5.5 0 0 0 0 1H7v4.5a.5.5 0 0 0 1 0V8h4.5a.5.5 0 0 0 0-1H8V2.5z" fill="currentColor">
|
||||
</path>
|
||||
</g>
|
||||
</svg>
|
||||
""")
|
||||
|
||||
# Fluent ErrorCircle20Regular
|
||||
icon_error = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M10 2a8 8 0 1 1 0 16a8 8 0 0 1 0-16zm0 1a7 7 0 1 0 0 14a7 7 0 0 0 0-14zm0 9.5a.75.75 0 1 1 0 1.5a.75.75 0 0 1 0-1.5zM10 6a.5.5 0 0 1 .492.41l.008.09V11a.5.5 0 0 1-.992.09L9.5 11V6.5A.5.5 0 0 1 10 6z" fill="currentColor"></path></g></svg>""")
|
||||
@@ -6,6 +6,9 @@
|
||||
--mmt-tooltip-zindex: 10;
|
||||
--datagrid-drag-drop-zindex: 5;
|
||||
--datagrid-resize-zindex: 1;
|
||||
--color-splitter: color-mix(in oklab, var(--color-base-content) 50%, #0000);
|
||||
--color-splitter-active: color-mix(in oklab, var(--color-base-content) 50%, #ffff);
|
||||
--color-btn-hover: color-mix(in oklab, var(--btn-color, var(--color-base-200)), #000 7%);
|
||||
}
|
||||
|
||||
.mmt-tooltip-container {
|
||||
@@ -28,6 +31,38 @@
|
||||
transition: opacity 0.3s ease; /* No delay when becoming visible */
|
||||
}
|
||||
|
||||
.mmt-visible-on-hover {
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
transition: opacity 0.2s ease, visibility 0s linear 0.2s;
|
||||
}
|
||||
|
||||
.mmt-btn {
|
||||
user-select: none;
|
||||
border-style: solid;
|
||||
}
|
||||
|
||||
.mmt-btn:hover {
|
||||
background-color: var(--color-btn-hover);
|
||||
}
|
||||
|
||||
.mmt-btn-disabled {
|
||||
opacity: 0.5;
|
||||
/*cursor: not-allowed;*/
|
||||
}
|
||||
|
||||
/* When parent is hovered, show the child elements with this class */
|
||||
*:hover > .mmt-visible-on-hover {
|
||||
opacity: 1;
|
||||
visibility: visible;
|
||||
transition: opacity 0.2s ease;
|
||||
}
|
||||
|
||||
.mmt-selected {
|
||||
background-color: var(--color-base-300);
|
||||
border-radius: .25rem;
|
||||
}
|
||||
|
||||
.icon-32 {
|
||||
width: 32px;
|
||||
height: 32px;
|
||||
@@ -42,6 +77,8 @@
|
||||
width: 24px;
|
||||
min-width: 24px;
|
||||
height: 24px;
|
||||
margin-top: auto;
|
||||
margin-bottom: auto;
|
||||
}
|
||||
|
||||
.icon-24 svg {
|
||||
@@ -65,7 +102,6 @@
|
||||
padding-top: 4px;
|
||||
}
|
||||
|
||||
|
||||
.icon-16 {
|
||||
width: 16px;
|
||||
min-width: 16px;
|
||||
@@ -82,7 +118,6 @@
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
|
||||
.icon-bool {
|
||||
display: block;
|
||||
width: 20px;
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
const tooltipElementId = "mmt-app"
|
||||
|
||||
function bindTooltipsWithDelegation() {
|
||||
// To display the tooltip, the attribute 'data-tooltip' is mandatory => it contains the text to tooltip
|
||||
// Then
|
||||
// the 'truncate' to show only when the text is truncated
|
||||
// the class 'mmt-tooltip' for force the display
|
||||
|
||||
const elementId = tooltipElementId
|
||||
console.debug("bindTooltips on element " + elementId);
|
||||
|
||||
@@ -20,11 +25,19 @@ function bindTooltipsWithDelegation() {
|
||||
|
||||
// Add a single mouseenter and mouseleave listener to the parent element
|
||||
element.addEventListener("mouseenter", (event) => {
|
||||
//console.debug("Entering element", event.target)
|
||||
|
||||
const cell = event.target.closest("[data-tooltip]");
|
||||
if (!cell) return;
|
||||
if (!cell) {
|
||||
// console.debug(" No 'data-tooltip' attribute found. Stopping.");
|
||||
return;
|
||||
}
|
||||
|
||||
const no_tooltip = element.hasAttribute("mmt-no-tooltip");
|
||||
if (no_tooltip) return;
|
||||
if (no_tooltip) {
|
||||
// console.debug(" Attribute 'mmt-no-tooltip' found. Cancelling.");
|
||||
return;
|
||||
}
|
||||
|
||||
const content = cell.querySelector(".truncate") || cell;
|
||||
const isOverflowing = content.scrollWidth > content.clientWidth;
|
||||
@@ -88,4 +101,125 @@ function enableTooltip() {
|
||||
}
|
||||
|
||||
element.removeAttribute("mmt-no-tooltip");
|
||||
}
|
||||
}
|
||||
|
||||
// Function to save form data to browser storage and track user input in real time
|
||||
function saveFormData(formId) {
|
||||
const form = document.getElementById(formId);
|
||||
if (!form) {
|
||||
console.error(`Form with ID '${formId}' not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
const storageKey = `formData_${formId}`;
|
||||
|
||||
// Function to save current form state
|
||||
function saveCurrentState() {
|
||||
const formData = {};
|
||||
|
||||
// Get all input elements
|
||||
const inputs = form.querySelectorAll('input, select, textarea');
|
||||
|
||||
inputs.forEach(input => {
|
||||
if (input.type === 'checkbox' || input.type === 'radio') {
|
||||
formData[input.name || input.id] = input.checked;
|
||||
} else {
|
||||
formData[input.name || input.id] = input.value;
|
||||
}
|
||||
});
|
||||
|
||||
// Store in browser storage
|
||||
const dataToStore = {
|
||||
timestamp: new Date().toISOString(),
|
||||
data: formData
|
||||
};
|
||||
|
||||
try {
|
||||
localStorage.setItem(storageKey, JSON.stringify(dataToStore));
|
||||
} catch (error) {
|
||||
console.error('Error saving form data:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Add event listeners for real-time tracking
|
||||
const inputs = form.querySelectorAll('input, select, textarea');
|
||||
|
||||
inputs.forEach(input => {
|
||||
// For text inputs, textareas, and selects
|
||||
if (input.type === 'text' || input.type === 'email' || input.type === 'password' ||
|
||||
input.type === 'number' || input.type === 'tel' || input.type === 'url' ||
|
||||
input.tagName === 'TEXTAREA' || input.tagName === 'SELECT') {
|
||||
|
||||
// Use 'input' event for real-time tracking
|
||||
input.addEventListener('input', saveCurrentState);
|
||||
// Also use 'change' event as fallback
|
||||
input.addEventListener('change', saveCurrentState);
|
||||
}
|
||||
|
||||
// For checkboxes and radio buttons
|
||||
if (input.type === 'checkbox' || input.type === 'radio') {
|
||||
input.addEventListener('change', saveCurrentState);
|
||||
}
|
||||
});
|
||||
|
||||
// Save initial state
|
||||
saveCurrentState();
|
||||
|
||||
console.debug(`Real-time form tracking enabled for form: ${formId}`);
|
||||
}
|
||||
|
||||
// Function to restore form data from browser storage
|
||||
function restoreFormData(formId) {
|
||||
const form = document.getElementById(formId);
|
||||
if (!form) {
|
||||
console.error(`Form with ID '${formId}' not found`);
|
||||
return;
|
||||
}
|
||||
|
||||
const storageKey = `formData_${formId}`;
|
||||
|
||||
try {
|
||||
const storedData = localStorage.getItem(storageKey);
|
||||
|
||||
if (storedData) {
|
||||
const parsedData = JSON.parse(storedData);
|
||||
const formData = parsedData.data;
|
||||
|
||||
// Restore all input values
|
||||
const inputs = form.querySelectorAll('input, select, textarea');
|
||||
|
||||
inputs.forEach(input => {
|
||||
const key = input.name || input.id;
|
||||
if (formData.hasOwnProperty(key)) {
|
||||
if (input.type === 'checkbox' || input.type === 'radio') {
|
||||
input.checked = formData[key];
|
||||
} else {
|
||||
input.value = formData[key];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error restoring form data:', error);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function bindFormData(formId) {
|
||||
console.debug("bindFormData on form " + (formId));
|
||||
restoreFormData(formId);
|
||||
saveFormData(formId);
|
||||
}
|
||||
|
||||
// Function to clear saved form data
|
||||
function clearFormData(formId) {
|
||||
const storageKey = `formData_${formId}`;
|
||||
|
||||
try {
|
||||
localStorage.removeItem(storageKey);
|
||||
console.log(`Cleared saved data for form: ${formId}`);
|
||||
} catch (error) {
|
||||
console.error('Error clearing form data:', error);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,3 +34,20 @@ class BaseComponent:
|
||||
@staticmethod
|
||||
def create_component_id(session):
|
||||
pass
|
||||
|
||||
|
||||
class BaseComponentSingleton(BaseComponent):
|
||||
"""
|
||||
Base class for components that will have a single instance per user
|
||||
"""
|
||||
|
||||
COMPONENT_INSTANCE_ID = None
|
||||
|
||||
def __init__(self, session, _id=None, settings_manager=None, tabs_manager=None, **kwargs):
|
||||
super().__init__(session, _id, **kwargs)
|
||||
self._settings_manager = settings_manager
|
||||
self.tabs_manager = tabs_manager
|
||||
|
||||
@classmethod
|
||||
def create_component_id(cls, session):
|
||||
return f"{cls.COMPONENT_INSTANCE_ID}{session['user_id']}"
|
||||
|
||||
@@ -48,4 +48,28 @@ def post(session, _id: str, content: str):
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.ImportHolidays} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.import_holidays()
|
||||
return instance.import_holidays()
|
||||
|
||||
@rt(Routes.ConfigureJira)
|
||||
def get(session, _id: str, boundaries: str):
|
||||
logger.debug(f"Entering {Routes.ConfigureJira} - GET with args {debug_session(session)}, {_id=}, {boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.show_configure_jira(json.loads(boundaries) if boundaries else None)
|
||||
|
||||
@rt(Routes.ConfigureJira)
|
||||
def post(session, _id: str, args: dict):
|
||||
logger.debug(f"Entering {Routes.ConfigureJira} - POST with args {debug_session(session)}, {_id=}, {args=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.update_jira_settings(args)
|
||||
|
||||
@rt(Routes.ConfigureJiraCancel)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.ConfigureJiraCancel} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.cancel_jira_settings()
|
||||
|
||||
@rt(Routes.ConfigureJiraTest)
|
||||
def post(session, _id: str, args: dict):
|
||||
logger.debug(f"Entering {Routes.ConfigureJiraTest} with args {debug_session(session)}, {_id=}, {args=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.test_jira_settings(args)
|
||||
@@ -23,9 +23,16 @@ class AiBuddySettingsEntry:
|
||||
self.ollama_port = port
|
||||
|
||||
|
||||
@dataclass()
|
||||
class JiraSettingsEntry:
|
||||
user_name: str = ""
|
||||
api_token: str = ""
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdminSettings:
|
||||
ai_buddy: AiBuddySettingsEntry = field(default_factory=AiBuddySettingsEntry)
|
||||
jira: JiraSettingsEntry = field(default_factory=JiraSettingsEntry)
|
||||
|
||||
|
||||
class AdminDbManager:
|
||||
@@ -37,3 +44,8 @@ class AdminDbManager:
|
||||
ADMIN_SETTINGS_ENTRY,
|
||||
AdminSettings,
|
||||
"ai_buddy")
|
||||
self.jira = NestedSettingsManager(session,
|
||||
settings_manager,
|
||||
ADMIN_SETTINGS_ENTRY,
|
||||
AdminSettings,
|
||||
"jira")
|
||||
|
||||
31
src/components/admin/assets/icons.py
Normal file
31
src/components/admin/assets/icons.py
Normal file
@@ -0,0 +1,31 @@
|
||||
from fastcore.basics import NotStr
|
||||
|
||||
icon_jira = NotStr("""<svg name="jira" viewBox="0 0 48 48" xmlns="http://www.w3.org/2000/svg">
|
||||
<defs>
|
||||
<style>.a{fill:none;stroke:currentColor;stroke-linecap:round;stroke-linejoin:round;stroke-width:2}</style>
|
||||
</defs>
|
||||
<path class="a" d="M5.5,22.9722h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,25.0278,42.5h0V22.9722Z"/>
|
||||
<path class="a" d="M14.2361,14.2361h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556a8.7361,8.7361,0,0,0,8.7361,8.7361h0V14.2361Z"/>
|
||||
<path class="a" d="M22.9722,5.5h0a8.7361,8.7361,0,0,0,8.7361,8.7361h2.0556v2.0556A8.7361,8.7361,0,0,0,42.5,25.0278h0V5.5Z"/>
|
||||
</svg>""")
|
||||
|
||||
|
||||
icon_msg_info = NotStr("""<svg name="info" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" class="h-6 w-6 shrink-0 stroke-current">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 16h-1v-4h-1m1-4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
""")
|
||||
|
||||
icon_msg_success = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
|
||||
</svg>
|
||||
""")
|
||||
|
||||
icon_msg_warning = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
|
||||
</svg>
|
||||
""")
|
||||
|
||||
icon_msg_error = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" class="h-6 w-6 shrink-0 stroke-current" fill="none" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
|
||||
</svg>
|
||||
""")
|
||||
@@ -38,7 +38,39 @@ class AdminCommandManager(BaseCommandManager):
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
|
||||
}
|
||||
|
||||
|
||||
def show_configure_jira(self):
|
||||
return {
|
||||
"hx-get": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", boundaries: getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
|
||||
}
|
||||
|
||||
def save_configure_jira(self):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJira}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
# The form adds the rest
|
||||
}
|
||||
|
||||
def cancel_configure_jira(self):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJiraCancel}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
def test_jira(self):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.ConfigureJiraTest}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
class ImportHolidaysCommandManager(BaseCommandManager):
|
||||
def __init__(self, owner):
|
||||
|
||||
@@ -4,15 +4,17 @@ from ai.mcp_client import MPC_CLIENTS_IDS
|
||||
from ai.mcp_tools import MCPServerTools
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.admin.admin_db_manager import AdminDbManager
|
||||
from components.admin.assets.icons import icon_jira
|
||||
from components.admin.commands import AdminCommandManager
|
||||
from components.admin.components.AdminForm import AdminFormItem, AdminFormType, AdminForm
|
||||
from components.admin.components.AdminForm import AdminFormItem, AdminFormType, AdminForm, AdminButton, AdminMessageType
|
||||
from components.admin.components.ImportHolidays import ImportHolidays
|
||||
from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID
|
||||
from components.admin.constants import ADMIN_INSTANCE_ID, ADMIN_AI_BUDDY_INSTANCE_ID, ADMIN_JIRA_INSTANCE_ID
|
||||
from components.aibuddy.assets.icons import icon_brain_ok
|
||||
from components.hoildays.assets.icons import icon_holidays
|
||||
from components.tabs.components.MyTabs import MyTabs
|
||||
from components_helpers import mk_ellipsis, mk_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.jira import Jira
|
||||
|
||||
|
||||
class Admin(BaseComponent):
|
||||
@@ -35,7 +37,7 @@ class Admin(BaseComponent):
|
||||
hooks = {
|
||||
"on_ok": self.commands.save_ai_buddy(),
|
||||
"on_cancel": self.commands.cancel_ai_buddy(),
|
||||
"ok_title": "Apply"
|
||||
"ok_title": "Apply",
|
||||
}
|
||||
form = InstanceManager.get(self._session,
|
||||
AdminForm.create_component_id(self._session, prefix=self._id),
|
||||
@@ -59,8 +61,33 @@ class Admin(BaseComponent):
|
||||
|
||||
return self._add_tab(ADMIN_AI_BUDDY_INSTANCE_ID, "Admin - Import Holidays", form)
|
||||
|
||||
def show_configure_jira(self, boundaries):
|
||||
fields = [
|
||||
AdminFormItem('user_name', "Email", "Email used to connect to JIRA.", AdminFormType.TEXT),
|
||||
AdminFormItem("api_token", "API Key", "API Key to connect to JIRA.", AdminFormType.TEXT),
|
||||
]
|
||||
hooks = {
|
||||
"on_ok": self.commands.save_configure_jira(),
|
||||
"on_cancel": self.commands.cancel_configure_jira(),
|
||||
"ok_title": "Apply",
|
||||
"extra_buttons": [AdminButton("Test", self.commands.test_jira)]
|
||||
}
|
||||
|
||||
form = InstanceManager.get(self._session,
|
||||
AdminForm.create_component_id(self._session, prefix=self._id),
|
||||
AdminForm,
|
||||
owner=self,
|
||||
title="Jira Configuration Page",
|
||||
obj=self.db.jira,
|
||||
form_fields=fields,
|
||||
hooks=hooks,
|
||||
key=ADMIN_JIRA_INSTANCE_ID,
|
||||
boundaries=boundaries
|
||||
)
|
||||
return self._add_tab(ADMIN_JIRA_INSTANCE_ID, "Admin - Jira Configuration", form)
|
||||
|
||||
def update_ai_buddy_settings(self, values: dict):
|
||||
values = self.manage_lists(values)
|
||||
values = AdminForm.get_fields_values(values)
|
||||
self.db.ai_buddy.update(values, ignore_missing=True)
|
||||
return self.tabs_manager.render()
|
||||
|
||||
@@ -69,6 +96,27 @@ class Admin(BaseComponent):
|
||||
self.tabs_manager.remove_tab(tab_id)
|
||||
return self.tabs_manager.render()
|
||||
|
||||
def update_jira_settings(self, values: dict):
|
||||
values = AdminForm.get_fields_values(values)
|
||||
self.db.jira.update(values, ignore_missing=True)
|
||||
return self.tabs_manager.render()
|
||||
|
||||
def cancel_jira_settings(self):
|
||||
tab_id = self.tabs_manager.get_tab_id(ADMIN_JIRA_INSTANCE_ID)
|
||||
self.tabs_manager.remove_tab(tab_id)
|
||||
return self.tabs_manager.render()
|
||||
|
||||
def test_jira_settings(self, values: dict):
|
||||
values = AdminForm.get_fields_values(values)
|
||||
jira = Jira(values["user_name"], values["api_token"])
|
||||
form = self.tabs_manager.get_tab_content_by_key(ADMIN_JIRA_INSTANCE_ID)
|
||||
res = jira.test()
|
||||
if res.status_code == 200:
|
||||
form.set_message("Success !", AdminMessageType.SUCCESS)
|
||||
else:
|
||||
form.set_message(f"Error {res.status_code} - {res.text}", AdminMessageType.ERROR)
|
||||
return self.tabs_manager.render()
|
||||
|
||||
def __ft__(self):
|
||||
return Div(
|
||||
Div(cls="divider"),
|
||||
@@ -84,6 +132,11 @@ class Admin(BaseComponent):
|
||||
mk_ellipsis("holidays", cls="text-sm", **self.commands.show_import_holidays()),
|
||||
cls="flex p-0 min-h-0 truncate",
|
||||
),
|
||||
Div(
|
||||
mk_icon(icon_jira, can_select=False),
|
||||
mk_ellipsis("jira", cls="text-sm", **self.commands.show_configure_jira()),
|
||||
cls="flex p-0 min-h-0 truncate",
|
||||
),
|
||||
#
|
||||
# cls=""),
|
||||
# Script(f"bindAdmin('{self._id}')"),
|
||||
@@ -97,40 +150,3 @@ class Admin(BaseComponent):
|
||||
@staticmethod
|
||||
def create_component_id(session):
|
||||
return f"{ADMIN_INSTANCE_ID}{session['user_id']}"
|
||||
|
||||
@staticmethod
|
||||
def manage_lists(data_dict):
|
||||
"""
|
||||
Processes a dictionary of key-value pairs to reorganize keys based on specific
|
||||
criteria. If a key ends with its corresponding string value, the method extracts
|
||||
the prefix of the key (the portion of the key before the value) and groups the
|
||||
value under this prefix in a list. Otherwise, the original key-value pair is
|
||||
preserved in the resulting dictionary.
|
||||
|
||||
:param data_dict: Dictionary where each key is a string and its corresponding
|
||||
value can be of any type.
|
||||
:type data_dict: dict
|
||||
:return: A dictionary where the keys have been categorized into groups
|
||||
based on whether they end with the same string value, reorganized into
|
||||
lists, while preserving other key-value pairs as they are.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
result_dict = {}
|
||||
|
||||
for key, value in data_dict.items():
|
||||
# Check if the value is a string and the key ends with the value
|
||||
if isinstance(value, str) and key.endswith(value):
|
||||
# Find the beginning part of the key (before the value)
|
||||
prefix = key.replace(value, '').rstrip('_')
|
||||
|
||||
# Add the value to the list under the prefix key
|
||||
if prefix not in result_dict:
|
||||
result_dict[prefix] = []
|
||||
|
||||
result_dict[prefix].append(value)
|
||||
|
||||
else:
|
||||
result_dict[key] = value
|
||||
|
||||
return result_dict
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
from typing import Any, Callable
|
||||
|
||||
from fasthtml.components import *
|
||||
|
||||
from assets.icons import icon_error
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components_helpers import set_boundaries, mk_dialog_buttons, safe_get_dialog_buttons_parameters
|
||||
from components.admin.assets.icons import icon_msg_success, icon_msg_info, icon_msg_error, icon_msg_warning
|
||||
from components_helpers import apply_boundaries, mk_dialog_buttons, safe_get_dialog_buttons_parameters, mk_icon
|
||||
from core.utils import get_unique_id
|
||||
|
||||
|
||||
@@ -18,6 +20,14 @@ class AdminFormType:
|
||||
TEXTAREA = "textarea"
|
||||
|
||||
|
||||
class AdminMessageType:
|
||||
NONE = "none"
|
||||
SUCCESS = "success"
|
||||
ERROR = "error"
|
||||
INFO = "info"
|
||||
WARNING = "warning"
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdminFormItem:
|
||||
name: str
|
||||
@@ -27,6 +37,12 @@ class AdminFormItem:
|
||||
possible_values: list[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class AdminButton:
|
||||
title: str
|
||||
on_click: Callable = None
|
||||
|
||||
|
||||
class AdminForm(BaseComponent):
|
||||
def __init__(self, session, _id, owner, title: str, obj: Any, form_fields: list[AdminFormItem], hooks=None, key=None,
|
||||
boundaries=None):
|
||||
@@ -38,6 +54,21 @@ class AdminForm(BaseComponent):
|
||||
self.title = title
|
||||
self.obj = obj
|
||||
self.form_fields = form_fields
|
||||
self.message = None
|
||||
|
||||
def set_message(self, message, msg_type: AdminMessageType.NONE):
|
||||
if msg_type == AdminMessageType.NONE:
|
||||
self.message = message
|
||||
else:
|
||||
if msg_type == AdminMessageType.SUCCESS:
|
||||
icon = icon_msg_success
|
||||
elif msg_type == AdminMessageType.ERROR:
|
||||
icon = icon_msg_error
|
||||
elif msg_type == AdminMessageType.WARNING:
|
||||
icon = icon_msg_warning
|
||||
else:
|
||||
icon = icon_msg_info
|
||||
self.message = Div(icon, Span(message), role=msg_type, cls=f"alert alert-{msg_type} mr-2")
|
||||
|
||||
def mk_input(self, item: AdminFormItem):
|
||||
return Input(
|
||||
@@ -62,7 +93,7 @@ class AdminForm(BaseComponent):
|
||||
cls="checkbox checkbox-xs",
|
||||
checked=value in current_values
|
||||
),
|
||||
|
||||
|
||||
cls="checkbox-item") for value in item.possible_values]
|
||||
|
||||
return Div(*checkbox_items, cls="adm-items-group")
|
||||
@@ -95,9 +126,20 @@ class AdminForm(BaseComponent):
|
||||
else:
|
||||
return self.mk_input(item)
|
||||
|
||||
def mk_extra_buttons(self):
|
||||
extra_buttons = self._hooks.get("extra_buttons", None)
|
||||
if not extra_buttons:
|
||||
return None
|
||||
|
||||
return Div(
|
||||
*[Button(btn.title, cls="btn btn-ghost btn-sm", **btn.on_click()) for btn in extra_buttons],
|
||||
cls="flex justify-end"
|
||||
)
|
||||
|
||||
def __ft__(self):
|
||||
return Form(
|
||||
Fieldset(Legend(self.title, cls="fieldset-legend"),
|
||||
Div(self.message),
|
||||
*[
|
||||
Div(
|
||||
Label(item.title, cls="label"),
|
||||
@@ -107,8 +149,9 @@ class AdminForm(BaseComponent):
|
||||
|
||||
for item in self.form_fields
|
||||
],
|
||||
self.mk_extra_buttons(),
|
||||
mk_dialog_buttons(**safe_get_dialog_buttons_parameters(self._hooks)),
|
||||
**set_boundaries(self._boundaries),
|
||||
**apply_boundaries(self._boundaries),
|
||||
cls="fieldset bg-base-200 border-base-300 rounded-box w-xs border p-4"
|
||||
)
|
||||
)
|
||||
@@ -119,3 +162,40 @@ class AdminForm(BaseComponent):
|
||||
suffix = get_unique_id()
|
||||
|
||||
return f"{prefix}{suffix}"
|
||||
|
||||
@staticmethod
|
||||
def get_fields_values(data_dict):
|
||||
"""
|
||||
Processes a dictionary of key-value pairs to reorganize keys based on specific
|
||||
criteria. If a key ends with its corresponding string value, the method extracts
|
||||
the prefix of the key (the portion of the key before the value) and groups the
|
||||
value under this prefix in a list. Otherwise, the original key-value pair is
|
||||
preserved in the resulting dictionary.
|
||||
|
||||
:param data_dict: Dictionary where each key is a string and its corresponding
|
||||
value can be of any type.
|
||||
:type data_dict: dict
|
||||
:return: A dictionary where the keys have been categorized into groups
|
||||
based on whether they end with the same string value, reorganized into
|
||||
lists, while preserving other key-value pairs as they are.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
result_dict = {}
|
||||
|
||||
for key, value in data_dict.items():
|
||||
# Check if the value is a string and the key ends with the value
|
||||
if isinstance(value, str) and key.endswith(value):
|
||||
# Find the beginning part of the key (before the value)
|
||||
prefix = key.replace(value, '').rstrip('_')
|
||||
|
||||
# Add the value to the list under the prefix key
|
||||
if prefix not in result_dict:
|
||||
result_dict[prefix] = []
|
||||
|
||||
result_dict[prefix].append(value)
|
||||
|
||||
else:
|
||||
result_dict[key] = value
|
||||
|
||||
return result_dict
|
||||
|
||||
@@ -9,7 +9,7 @@ from components.datagrid_new.components.DataGrid import DataGrid
|
||||
from components.datagrid_new.settings import DataGridSettings
|
||||
from components.hoildays.helpers.nibelisparser import NibelisParser
|
||||
from components.repositories.constants import USERS_REPOSITORY_NAME, HOLIDAYS_TABLE_NAME
|
||||
from components_helpers import mk_dialog_buttons, set_boundaries
|
||||
from components_helpers import mk_dialog_buttons, apply_boundaries
|
||||
from core.instance_manager import InstanceManager
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ class ImportHolidays(BaseComponent):
|
||||
mk_dialog_buttons(ok_title="Import", cls="mt-2", on_ok=self.commands.import_holidays()),
|
||||
id=self._id,
|
||||
cls="m-2",
|
||||
**set_boundaries(self._boundaries, other=26),
|
||||
**apply_boundaries(self._boundaries, other=26),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
ADMIN_INSTANCE_ID = "__Admin__"
|
||||
ADMIN_AI_BUDDY_INSTANCE_ID = "__AdminAIBuddy__"
|
||||
ADMIN_IMPORT_HOLIDAYS_INSTANCE_ID = "__AdminImportHolidays__"
|
||||
ADMIN_JIRA_INSTANCE_ID = "__AdminJira__"
|
||||
ROUTE_ROOT = "/admin"
|
||||
ADMIN_SETTINGS_ENTRY = "Admin"
|
||||
|
||||
@@ -8,4 +9,7 @@ class Routes:
|
||||
AiBuddy = "/ai-buddy"
|
||||
AiBuddyCancel = "/ai-buddy-cancel"
|
||||
ImportHolidays = "/import-holidays"
|
||||
PasteHolidays = "/paste-holidays"
|
||||
PasteHolidays = "/paste-holidays"
|
||||
ConfigureJira = "/configure-jira"
|
||||
ConfigureJiraCancel = "/configure-jira-cancel"
|
||||
ConfigureJiraTest = "/configure-jira-test"
|
||||
|
||||
@@ -39,7 +39,9 @@ function bindAIBuddy(elementId) {
|
||||
event.preventDefault();
|
||||
makeAIRequest();
|
||||
}
|
||||
});
|
||||
|
||||
document.addEventListener('keyup', (event) => {
|
||||
if (event.key === 'Shift') {
|
||||
const currentTime = new Date().getTime();
|
||||
if (currentTime - lastShiftPress <= doublePressDelay) {
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fasthtml.components import Div, sse_message
|
||||
from fasthtml.core import EventStream
|
||||
from fasthtml.fastapp import fast_app
|
||||
from starlette.datastructures import UploadFile
|
||||
|
||||
@@ -136,3 +139,16 @@ def post(session, _id: str, state: str, args: str = None):
|
||||
logger.debug(f"Entering on_state_changed with args {_id=}, {state=}, {args=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.manage_state_changed(state, args)
|
||||
|
||||
|
||||
@rt(Routes.YieldRow)
|
||||
async def get(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.YieldRow} with args {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return EventStream(instance.mk_body_content_sse())
|
||||
|
||||
@rt(Routes.GetPage)
|
||||
def get(session, _id: str, page_index: int):
|
||||
logger.debug(f"Entering {Routes.GetPage} with args {_id=}, {page_index=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.mk_body_content_page(page_index)
|
||||
@@ -1,6 +1,6 @@
|
||||
function bindDatagrid(datagridId, allowColumnsReordering) {
|
||||
bindScrollbars(datagridId);
|
||||
makeResizable(datagridId)
|
||||
manageScrollbars(datagridId, true);
|
||||
makeResizable(datagridId);
|
||||
}
|
||||
|
||||
function bindScrollbars(datagridId) {
|
||||
@@ -21,7 +21,7 @@ function bindScrollbars(datagridId) {
|
||||
const table = datagrid.querySelector(".dt2-table");
|
||||
|
||||
if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
|
||||
console.error("Essential scrollbar or content elements are missing in the datagrid.");
|
||||
console.error("Essential scrollbars or content elements are missing in the datagrid.");
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -176,6 +176,224 @@ function bindScrollbars(datagridId) {
|
||||
});
|
||||
}
|
||||
|
||||
function manageScrollbars(datagridId, binding) {
|
||||
console.debug("manageScrollbars on element " + datagridId + " with binding=" + binding);
|
||||
|
||||
const datagrid = document.getElementById(datagridId);
|
||||
|
||||
if (!datagrid) {
|
||||
console.error(`Datagrid with id "${datagridId}" not found.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const verticalScrollbar = datagrid.querySelector(".dt2-scrollbars-vertical");
|
||||
const verticalWrapper = datagrid.querySelector(".dt2-scrollbars-vertical-wrapper");
|
||||
const horizontalScrollbar = datagrid.querySelector(".dt2-scrollbars-horizontal");
|
||||
const horizontalWrapper = datagrid.querySelector(".dt2-scrollbars-horizontal-wrapper");
|
||||
const body = datagrid.querySelector(".dt2-body");
|
||||
const table = datagrid.querySelector(".dt2-table");
|
||||
|
||||
if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
|
||||
console.error("Essential scrollbars or content elements are missing in the datagrid.");
|
||||
return;
|
||||
}
|
||||
|
||||
const computeScrollbarVisibility = () => {
|
||||
// Determine if the content is clipped
|
||||
const isVerticalRequired = body.scrollHeight > body.clientHeight;
|
||||
const isHorizontalRequired = table.scrollWidth > table.clientWidth;
|
||||
|
||||
// Show or hide the scrollbar wrappers
|
||||
requestAnimationFrame(() => {
|
||||
verticalWrapper.style.display = isVerticalRequired ? "block" : "none";
|
||||
horizontalWrapper.style.display = isHorizontalRequired ? "block" : "none";
|
||||
});
|
||||
};
|
||||
|
||||
const computeScrollbarSize = () => {
|
||||
// Vertical scrollbar height
|
||||
const visibleHeight = body.clientHeight;
|
||||
const totalHeight = body.scrollHeight;
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
|
||||
let scrollbarHeight = 0;
|
||||
if (totalHeight > 0) {
|
||||
scrollbarHeight = (visibleHeight / totalHeight) * wrapperHeight;
|
||||
}
|
||||
|
||||
// Horizontal scrollbar width
|
||||
const visibleWidth = table.clientWidth;
|
||||
const totalWidth = table.scrollWidth;
|
||||
const wrapperWidth = horizontalWrapper.offsetWidth;
|
||||
|
||||
let scrollbarWidth = 0;
|
||||
if (totalWidth > 0) {
|
||||
scrollbarWidth = (visibleWidth / totalWidth) * wrapperWidth;
|
||||
}
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
verticalScrollbar.style.height = `${scrollbarHeight}px`;
|
||||
horizontalScrollbar.style.width = `${scrollbarWidth}px`;
|
||||
});
|
||||
};
|
||||
|
||||
const updateVerticalScrollbarForMouseWheel = () => {
|
||||
const maxScrollTop = body.scrollHeight - body.clientHeight;
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
|
||||
if (maxScrollTop > 0) {
|
||||
const scrollRatio = wrapperHeight / body.scrollHeight;
|
||||
verticalScrollbar.style.top = `${body.scrollTop * scrollRatio}px`;
|
||||
}
|
||||
};
|
||||
|
||||
if (binding) {
|
||||
// Clean up existing managers if they exist
|
||||
if (datagrid._managers) {
|
||||
// Remove drag events
|
||||
if (datagrid._managers.dragManager) {
|
||||
verticalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.verticalMouseDown);
|
||||
horizontalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.horizontalMouseDown);
|
||||
document.removeEventListener("mousemove", datagrid._managers.dragManager.mouseMove);
|
||||
document.removeEventListener("mouseup", datagrid._managers.dragManager.mouseUp);
|
||||
}
|
||||
|
||||
// Remove wheel events
|
||||
if (datagrid._managers.wheelManager) {
|
||||
body.removeEventListener("wheel", datagrid._managers.wheelManager.handleWheelScrolling);
|
||||
}
|
||||
|
||||
// Remove resize events
|
||||
if (datagrid._managers.resizeManager) {
|
||||
window.removeEventListener("resize", datagrid._managers.resizeManager.handleResize);
|
||||
}
|
||||
}
|
||||
|
||||
// Create managers
|
||||
const dragManager = {
|
||||
isDragging: false,
|
||||
startY: 0,
|
||||
startX: 0,
|
||||
|
||||
updateVerticalScrollbar: (deltaX, deltaY) => {
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
const scrollbarHeight = verticalScrollbar.offsetHeight;
|
||||
const maxScrollTop = body.scrollHeight - body.clientHeight;
|
||||
const scrollRatio = maxScrollTop / (wrapperHeight - scrollbarHeight);
|
||||
|
||||
let newTop = parseFloat(verticalScrollbar.style.top || "0") + deltaY;
|
||||
newTop = Math.max(0, Math.min(newTop, wrapperHeight - scrollbarHeight));
|
||||
|
||||
verticalScrollbar.style.top = `${newTop}px`;
|
||||
body.scrollTop = newTop * scrollRatio;
|
||||
},
|
||||
|
||||
updateHorizontalScrollbar: (deltaX, deltaY) => {
|
||||
const wrapperWidth = horizontalWrapper.offsetWidth;
|
||||
const scrollbarWidth = horizontalScrollbar.offsetWidth;
|
||||
const maxScrollLeft = table.scrollWidth - table.clientWidth;
|
||||
const scrollRatio = maxScrollLeft / (wrapperWidth - scrollbarWidth);
|
||||
|
||||
let newLeft = parseFloat(horizontalScrollbar.style.left || "0") + deltaX;
|
||||
newLeft = Math.max(0, Math.min(newLeft, wrapperWidth - scrollbarWidth));
|
||||
|
||||
horizontalScrollbar.style.left = `${newLeft}px`;
|
||||
table.scrollLeft = newLeft * scrollRatio;
|
||||
},
|
||||
|
||||
verticalMouseDown: (e) => {
|
||||
disableTooltip();
|
||||
dragManager.isDragging = true;
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
document.body.style.userSelect = "none";
|
||||
verticalScrollbar.classList.add("dt2-dragging");
|
||||
},
|
||||
|
||||
horizontalMouseDown: (e) => {
|
||||
disableTooltip();
|
||||
dragManager.isDragging = true;
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
document.body.style.userSelect = "none";
|
||||
horizontalScrollbar.classList.add("dt2-dragging");
|
||||
},
|
||||
|
||||
mouseMove: (e) => {
|
||||
if (dragManager.isDragging) {
|
||||
const deltaY = e.clientY - dragManager.startY;
|
||||
const deltaX = e.clientX - dragManager.startX;
|
||||
|
||||
// Determine which scrollbar is being dragged
|
||||
if (verticalScrollbar.classList.contains("dt2-dragging")) {
|
||||
dragManager.updateVerticalScrollbar(deltaX, deltaY);
|
||||
} else if (horizontalScrollbar.classList.contains("dt2-dragging")) {
|
||||
dragManager.updateHorizontalScrollbar(deltaX, deltaY);
|
||||
}
|
||||
|
||||
// Reset start points for next update
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
}
|
||||
},
|
||||
|
||||
mouseUp: () => {
|
||||
dragManager.isDragging = false;
|
||||
document.body.style.userSelect = "";
|
||||
verticalScrollbar.classList.remove("dt2-dragging");
|
||||
horizontalScrollbar.classList.remove("dt2-dragging");
|
||||
enableTooltip();
|
||||
}
|
||||
};
|
||||
|
||||
const wheelManager = {
|
||||
handleWheelScrolling: (event) => {
|
||||
const deltaX = event.deltaX;
|
||||
const deltaY = event.deltaY;
|
||||
|
||||
// Scroll the body and table content
|
||||
body.scrollTop += deltaY; // Vertical scrolling
|
||||
table.scrollLeft += deltaX; // Horizontal scrolling
|
||||
|
||||
// Update the vertical scrollbar position
|
||||
updateVerticalScrollbarForMouseWheel();
|
||||
|
||||
// Prevent default behavior to fully manage the scroll
|
||||
event.preventDefault();
|
||||
}
|
||||
};
|
||||
|
||||
const resizeManager = {
|
||||
handleResize: () => {
|
||||
computeScrollbarVisibility();
|
||||
computeScrollbarSize();
|
||||
updateVerticalScrollbarForMouseWheel();
|
||||
}
|
||||
};
|
||||
|
||||
// Store managers on datagrid for cleanup
|
||||
datagrid._managers = {
|
||||
dragManager,
|
||||
wheelManager,
|
||||
resizeManager
|
||||
};
|
||||
|
||||
// Bind events
|
||||
verticalScrollbar.addEventListener("mousedown", dragManager.verticalMouseDown);
|
||||
horizontalScrollbar.addEventListener("mousedown", dragManager.horizontalMouseDown);
|
||||
document.addEventListener("mousemove", dragManager.mouseMove);
|
||||
document.addEventListener("mouseup", dragManager.mouseUp);
|
||||
|
||||
body.addEventListener("wheel", wheelManager.handleWheelScrolling, {passive: false});
|
||||
|
||||
window.addEventListener("resize", resizeManager.handleResize);
|
||||
}
|
||||
|
||||
// Always execute computations
|
||||
computeScrollbarVisibility();
|
||||
computeScrollbarSize();
|
||||
}
|
||||
|
||||
function makeResizable(datagridId) {
|
||||
console.debug("makeResizable on element " + datagridId);
|
||||
|
||||
@@ -494,4 +712,5 @@ function onAfterSettle(datagridId, event) {
|
||||
if (response.includes("hx-on::before-settle")) {
|
||||
bindDatagrid(datagridId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import copy
|
||||
import html
|
||||
import logging
|
||||
from io import BytesIO
|
||||
from typing import Literal, Any
|
||||
@@ -20,9 +21,10 @@ from components.datagrid_new.db_management import DataGridDbManager
|
||||
from components.datagrid_new.settings import DataGridRowState, DataGridColumnState, \
|
||||
DataGridFooterConf, DataGridState, DataGridSettings, DatagridView
|
||||
from components_helpers import mk_icon, mk_ellipsis
|
||||
from core.fasthtml_helper import MyDiv, mk_my_ellipsis, MySpan, mk_my_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager
|
||||
from core.utils import get_unique_id, make_safe_id
|
||||
from core.utils import get_unique_id, make_safe_id, timed
|
||||
|
||||
logger = logging.getLogger("DataGrid")
|
||||
|
||||
@@ -59,6 +61,8 @@ class DataGrid(BaseComponent):
|
||||
self._state: DataGridState = self._db.load_state()
|
||||
self._settings: DataGridSettings = grid_settings or self._db.load_settings()
|
||||
self._df: DataFrame | None = self._db.load_dataframe()
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._total_rows = len(self._df) if self._df is not None else 0
|
||||
|
||||
# update boundaries if possible
|
||||
self.set_boundaries(boundaries)
|
||||
@@ -118,14 +122,23 @@ class DataGrid(BaseComponent):
|
||||
else:
|
||||
return ColumnType.Text # Default to Text if no match
|
||||
|
||||
def _init_columns(_df):
|
||||
columns = [DataGridColumnState(make_safe_id(col_id),
|
||||
col_index,
|
||||
col_id,
|
||||
_get_column_type(self._df[make_safe_id(col_id)].dtype))
|
||||
for col_index, col_id in enumerate(_df.columns)]
|
||||
if self._state.row_index:
|
||||
columns.insert(0, DataGridColumnState(make_safe_id(ROW_INDEX_ID), -1, " ", ColumnType.RowIndex))
|
||||
|
||||
return columns
|
||||
|
||||
self._df = df.copy()
|
||||
self._df.columns = self._df.columns.map(make_safe_id) # make sure column names are trimmed
|
||||
self._state.rows = [DataGridRowState(row_id) for row_id in self._df.index]
|
||||
self._state.columns = [DataGridColumnState(make_safe_id(col_id),
|
||||
col_index,
|
||||
col_id,
|
||||
_get_column_type(self._df[make_safe_id(col_id)].dtype))
|
||||
for col_index, col_id in enumerate(df.columns)]
|
||||
self._state.columns = _init_columns(df) # use df not self._df to keep the original title
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._total_rows = len(self._df) if self._df is not None else 0
|
||||
|
||||
if save_state:
|
||||
self._db.save_all(None, self._state, self._df)
|
||||
@@ -205,6 +218,7 @@ class DataGrid(BaseComponent):
|
||||
|
||||
self._state.columns = new_columns_states
|
||||
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._views.recompute_need_save()
|
||||
|
||||
self._db.save_all(self._settings, self._state, self._df if new_column else None)
|
||||
@@ -386,6 +400,7 @@ class DataGrid(BaseComponent):
|
||||
id=f"scb_{self._id}",
|
||||
)
|
||||
|
||||
@timed
|
||||
def mk_table(self, oob=False):
|
||||
htmx_extra_params = {
|
||||
"hx-on::before-settle": f"onAfterSettle('{self._id}', event);",
|
||||
@@ -439,7 +454,7 @@ class DataGrid(BaseComponent):
|
||||
_mk_keyboard_management(),
|
||||
Div(
|
||||
self.mk_table_header(),
|
||||
self.mk_table_body(),
|
||||
self.mk_table_body_page(),
|
||||
self.mk_table_footer(),
|
||||
cls="dt2-inner-table"),
|
||||
cls="dt2-table",
|
||||
@@ -479,20 +494,18 @@ class DataGrid(BaseComponent):
|
||||
id=f"th_{self._id}"
|
||||
)
|
||||
|
||||
def mk_table_body(self):
|
||||
df = self._get_filtered_df()
|
||||
def mk_table_body_page(self):
|
||||
"""
|
||||
This function is used to update the table body when the vertical scrollbar reaches the end
|
||||
A new page is added when requested
|
||||
"""
|
||||
max_height = self._compute_body_max_height()
|
||||
|
||||
return Div(
|
||||
*[Div(
|
||||
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
|
||||
cls="dt2-row",
|
||||
data_row=f"{row_index}",
|
||||
id=f"tr_{self._id}-{row_index}",
|
||||
) for row_index in df.index],
|
||||
*self.mk_body_content_page(0),
|
||||
cls="dt2-body",
|
||||
style=f"max-height:{max_height}px;",
|
||||
id=f"tb_{self._id}"
|
||||
id=f"tb_{self._id}",
|
||||
)
|
||||
|
||||
def mk_table_footer(self):
|
||||
@@ -507,34 +520,55 @@ class DataGrid(BaseComponent):
|
||||
id=f"tf_{self._id}"
|
||||
)
|
||||
|
||||
def mk_body_content_page(self, page_index: int):
|
||||
df = self._get_filtered_df()
|
||||
start = page_index * DATAGRID_PAGE_SIZE
|
||||
end = start + DATAGRID_PAGE_SIZE
|
||||
if self._total_rows > end:
|
||||
last_row = df.index[end - 1]
|
||||
else:
|
||||
last_row = None
|
||||
|
||||
rows = [Div(
|
||||
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
|
||||
cls="dt2-row",
|
||||
data_row=f"{row_index}",
|
||||
id=f"tr_{self._id}-{row_index}",
|
||||
**self.commands.get_page(page_index + 1) if row_index == last_row else {}
|
||||
) for row_index in df.index[start:end]]
|
||||
|
||||
rows.append(Script(f"manageScrollbars('{self._id}', false);"), )
|
||||
|
||||
return rows
|
||||
|
||||
def mk_body_cell(self, col_pos, row_index, col_def: DataGridColumnState):
|
||||
if not col_def.usable:
|
||||
return None
|
||||
|
||||
if not col_def.visible:
|
||||
return Div(cls="dt2-col-hidden")
|
||||
return MyDiv(cls="dt2-col-hidden")
|
||||
|
||||
content = self.mk_body_cell_content(col_pos, row_index, col_def)
|
||||
|
||||
return Div(content,
|
||||
data_col=col_def.col_id,
|
||||
style=f"width:{col_def.width}px;",
|
||||
cls="dt2-cell")
|
||||
return MyDiv(content,
|
||||
data_col=col_def.col_id,
|
||||
style=f"width:{col_def.width}px;",
|
||||
cls="dt2-cell")
|
||||
|
||||
def mk_body_cell_content(self, col_pos, row_index, col_def: DataGridColumnState):
|
||||
|
||||
def mk_bool(value):
|
||||
return Div(mk_icon(icon_checked if value else icon_unchecked, can_select=False),
|
||||
cls="dt2-cell-content-checkbox")
|
||||
def mk_bool(_value):
|
||||
return MyDiv(mk_my_icon(icon_checked if _value else icon_unchecked, can_select=False),
|
||||
cls="dt2-cell-content-checkbox")
|
||||
|
||||
def mk_text(value):
|
||||
return mk_ellipsis(value, cls="dt2-cell-content-text")
|
||||
def mk_text(_value):
|
||||
return mk_my_ellipsis(_value, cls="dt2-cell-content-text")
|
||||
|
||||
def mk_number(value):
|
||||
return mk_ellipsis(value, cls="dt2-cell-content-number")
|
||||
def mk_number(_value):
|
||||
return mk_my_ellipsis(_value, cls="dt2-cell-content-number")
|
||||
|
||||
def process_cell_content(value):
|
||||
value_str = str(value)
|
||||
def process_cell_content(_value):
|
||||
value_str = html.escape(str(_value))
|
||||
|
||||
if FILTER_INPUT_CID not in self._state.filtered or (
|
||||
keyword := self._state.filtered[FILTER_INPUT_CID]) is None:
|
||||
@@ -545,21 +579,22 @@ class DataGrid(BaseComponent):
|
||||
return value_str
|
||||
|
||||
len_keyword = len(keyword)
|
||||
res = [Span(value_str[:index])] if index > 0 else []
|
||||
res += [Span(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
|
||||
res += [Span(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
|
||||
res = [MySpan(value_str[:index])] if index > 0 else []
|
||||
res += [MySpan(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
|
||||
res += [MySpan(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
|
||||
return tuple(res)
|
||||
|
||||
column_type = col_def.type
|
||||
value = self._fast_access[col_def.col_id][row_index]
|
||||
|
||||
if column_type == ColumnType.Bool:
|
||||
content = mk_bool(self._df.iloc[row_index, col_def.col_index])
|
||||
content = mk_bool(value)
|
||||
elif column_type == ColumnType.Number:
|
||||
content = mk_number(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
|
||||
content = mk_number(process_cell_content(value))
|
||||
elif column_type == ColumnType.RowIndex:
|
||||
content = mk_number(row_index)
|
||||
else:
|
||||
content = mk_text(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
|
||||
content = mk_text(process_cell_content(value))
|
||||
|
||||
return content
|
||||
|
||||
@@ -822,6 +857,31 @@ class DataGrid(BaseComponent):
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _init_fast_access(df):
|
||||
"""
|
||||
Generates a fast-access dictionary for a DataFrame.
|
||||
|
||||
This method converts the columns of the provided DataFrame into NumPy arrays
|
||||
and stores them as values in a dictionary, using the column names as keys.
|
||||
This allows for efficient access to the data stored in the DataFrame.
|
||||
|
||||
Args:
|
||||
df (DataFrame): The input pandas DataFrame whose columns are to be converted
|
||||
into a dictionary of NumPy arrays.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary where the keys are the column names of the input DataFrame
|
||||
and the values are the corresponding column values as NumPy arrays.
|
||||
"""
|
||||
if df is None:
|
||||
return {}
|
||||
|
||||
res = {col: df[col].to_numpy() for col in df.columns}
|
||||
res[ROW_INDEX_ID] = df.index.to_numpy()
|
||||
return res
|
||||
|
||||
@timed
|
||||
def __ft__(self):
|
||||
return Div(
|
||||
Div(
|
||||
@@ -844,7 +904,7 @@ class DataGrid(BaseComponent):
|
||||
@staticmethod
|
||||
def new(session, data, index=None):
|
||||
datagrid = DataGrid(session, DataGrid.create_component_id(session))
|
||||
#dataframe = DataFrame(data, index=index)
|
||||
# dataframe = DataFrame(data, index=index)
|
||||
dataframe = DataFrame(data)
|
||||
datagrid.init_from_dataframe(dataframe)
|
||||
return datagrid
|
||||
|
||||
@@ -91,12 +91,21 @@ class DataGridCommandManager(BaseCommandManager):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.OnClick}",
|
||||
"hx-target": f"#tsm_{self._id}",
|
||||
"hx-trigger" : "click",
|
||||
"hx-trigger": "click",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{_id: "{self._id}", cell_id:getCellId(event), modifier:getClickModifier(event), boundaries: getCellBoundaries(event)}}',
|
||||
"hx-on::before-request": f'validateOnClickRequest("{self._id}", event)',
|
||||
}
|
||||
|
||||
def get_page(self, page_index=0):
|
||||
return {
|
||||
"hx-get": f"{ROUTE_ROOT}{Routes.GetPage}",
|
||||
"hx-target": f"#tb_{self._id}",
|
||||
"hx-swap": "beforeend",
|
||||
"hx-vals": f'{{"_id": "{self._id}", "page_index": "{page_index}"}}',
|
||||
"hx-trigger": f"intersect root:#tb_{self._id} once",
|
||||
}
|
||||
|
||||
def _get_hide_show_columns_attrs(self, mode, col_defs: list, new_value, cls=""):
|
||||
str_col_names = ", ".join(f"'{col_def.title}'" for col_def in col_defs)
|
||||
tooltip_msg = f"{mode} column{'s' if len(col_defs) > 1 else ''} {str_col_names}"
|
||||
@@ -165,4 +174,4 @@ class FilterAllCommands(BaseCommandManager):
|
||||
"hx_vals": f'{{"_id": "{self._id}", "col_id":"{FILTER_INPUT_CID}"}}',
|
||||
"data_tooltip": "Reset filter",
|
||||
"cls": self.merge_class(cls, "mmt-tooltip"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,9 @@ CONTAINER_HEIGHT = "container_height"
|
||||
|
||||
DATAGRID_STATE_FOOTER = "footer"
|
||||
|
||||
DATAGRID_PAGE_SIZE = 50
|
||||
|
||||
ROW_INDEX_ID = "__row_index__"
|
||||
|
||||
class Routes:
|
||||
Filter = "/filter" # request the filtering in the grid
|
||||
@@ -33,6 +36,8 @@ class Routes:
|
||||
UpdateView = "/update_view"
|
||||
ShowFooterMenu = "/show_footer_menu"
|
||||
UpdateState = "/update_state"
|
||||
YieldRow = "/yield-row"
|
||||
GetPage = "/page"
|
||||
|
||||
|
||||
class ColumnType(Enum):
|
||||
@@ -44,11 +49,13 @@ class ColumnType(Enum):
|
||||
Choice = "Choice"
|
||||
List = "List"
|
||||
|
||||
|
||||
class ViewType(Enum):
|
||||
Table = "Table"
|
||||
Chart = "Chart"
|
||||
Form = "Form"
|
||||
|
||||
|
||||
class FooterAggregation(Enum):
|
||||
Sum = "Sum"
|
||||
Mean = "Mean"
|
||||
@@ -59,4 +66,4 @@ class FooterAggregation(Enum):
|
||||
FilteredMean = "FilteredMean"
|
||||
FilteredMin = "FilteredMin"
|
||||
FilteredMax = "FilteredMax"
|
||||
FilteredCount = "FilteredCount"
|
||||
FilteredCount = "FilteredCount"
|
||||
|
||||
@@ -29,14 +29,24 @@ class DataGridDbManager:
|
||||
def __init__(self, session: dict, settings_manager: SettingsManager, key: tuple):
|
||||
self._session = session
|
||||
self._settings_manager = settings_manager
|
||||
self._key = "#".join(make_safe_id(item) for item in key) if key else ""
|
||||
self._key = self._key_as_string(key)
|
||||
|
||||
# init the db if needed
|
||||
if self._settings_manager and not self._settings_manager.exists(self._session, self._get_db_entry()):
|
||||
self._settings_manager.save(self._session, self._get_db_entry(), {})
|
||||
|
||||
def _get_db_entry(self):
|
||||
return f"{DATAGRID_DB_ENTRY}_{self._key}"
|
||||
return make_safe_id(f"{DATAGRID_DB_ENTRY}_{self._key}")
|
||||
|
||||
@staticmethod
|
||||
def _key_as_string(key):
|
||||
if not key:
|
||||
return ""
|
||||
|
||||
if isinstance(key, tuple):
|
||||
return "#".join(make_safe_id(item) for item in key)
|
||||
|
||||
return make_safe_id(key)
|
||||
|
||||
def save_settings(self, settings: DataGridSettings):
|
||||
if self._settings_manager is None:
|
||||
|
||||
@@ -69,6 +69,7 @@ class DataGridSettings:
|
||||
class DataGridState:
|
||||
sidebar_visible: bool = False
|
||||
selected_view: str = None
|
||||
row_index: bool = False
|
||||
columns: list[DataGridColumnState] = dataclasses.field(default_factory=list)
|
||||
rows: list[DataGridRowState] = dataclasses.field(default_factory=list) # only the rows that have a specific state
|
||||
footers: list[DataGridFooterConf] = dataclasses.field(default_factory=list)
|
||||
|
||||
@@ -9,7 +9,7 @@ from components.datagrid_new.components.DataGrid import DataGrid
|
||||
from components.debugger.assets.icons import icon_expanded, icon_collapsed, icon_class
|
||||
from components.debugger.commands import JsonViewerCommands
|
||||
from components.debugger.constants import INDENT_SIZE, MAX_TEXT_LENGTH, NODE_OBJECT, NODES_KEYS_TO_NOT_EXPAND
|
||||
from components_helpers import set_boundaries
|
||||
from components_helpers import apply_boundaries
|
||||
from core.serializer import TAG_OBJECT
|
||||
from core.utils import get_unique_id
|
||||
|
||||
@@ -299,7 +299,7 @@ class JsonViewer(BaseComponent):
|
||||
style="margin-left: 0px;"),
|
||||
cls="mmt-jsonviewer",
|
||||
id=f"{self._id}",
|
||||
**set_boundaries(self._boundaries),
|
||||
**apply_boundaries(self._boundaries),
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
|
||||
@@ -11,6 +11,8 @@ from components.drawerlayout.assets.icons import icon_panel_contract_regular, ic
|
||||
from components.drawerlayout.constants import DRAWER_LAYOUT_INSTANCE_ID
|
||||
from components.repositories.components.Repositories import Repositories
|
||||
from components.tabs.components.MyTabs import MyTabs
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.workflows.components.Workflows import Workflows
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager
|
||||
|
||||
@@ -24,11 +26,13 @@ class DrawerLayout(BaseComponent):
|
||||
self._settings_manager = settings_manager
|
||||
self._tabs = InstanceManager.get(session, MyTabs.create_component_id(session), MyTabs)
|
||||
self._repositories = self._create_component(Repositories)
|
||||
self._workflows = self._create_component(Workflows)
|
||||
self._debugger = self._create_component(Debugger)
|
||||
self._add_stuff = self._create_component(AddStuffMenu)
|
||||
self._ai_buddy = self._create_component(AIBuddy)
|
||||
self._admin = self._create_component(Admin)
|
||||
self._applications = self._create_component(Applications)
|
||||
self._undo_redo = self._create_component(UndoRedo)
|
||||
|
||||
self.top_components = self._get_sub_components("TOP", [self._ai_buddy])
|
||||
self.bottom_components = self._get_sub_components("BOTTOM", [self._ai_buddy])
|
||||
@@ -41,6 +45,7 @@ class DrawerLayout(BaseComponent):
|
||||
self._ai_buddy,
|
||||
self._applications,
|
||||
self._repositories,
|
||||
self._workflows,
|
||||
self._admin,
|
||||
self._debugger,
|
||||
),
|
||||
@@ -50,12 +55,16 @@ class DrawerLayout(BaseComponent):
|
||||
name="sidebar"
|
||||
),
|
||||
Div(
|
||||
Label(
|
||||
Input(type="checkbox",
|
||||
onclick=f"document.getElementById('sidebar_{self._id}').classList.toggle('collapsed');"),
|
||||
icon_panel_contract_regular,
|
||||
icon_panel_expand_regular,
|
||||
cls="swap",
|
||||
Div(
|
||||
Label(
|
||||
Input(type="checkbox",
|
||||
onclick=f"document.getElementById('sidebar_{self._id}').classList.toggle('collapsed');"),
|
||||
icon_panel_contract_regular,
|
||||
icon_panel_expand_regular,
|
||||
cls="swap mr-4",
|
||||
),
|
||||
self._undo_redo,
|
||||
cls="flex"
|
||||
),
|
||||
Div(*[component for component in self.top_components], name="top", cls='dl-top'),
|
||||
Div(self._tabs, id=f"page_{self._id}", name="page", cls='dl-page'),
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
from fasthtml.components import Html
|
||||
from fasthtml.components import *
|
||||
from fasthtml.xtend import Script
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ def get(session):
|
||||
|
||||
|
||||
@rt(Routes.AddRepository)
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries:str):
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddRepository} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id) # Repository
|
||||
@@ -34,8 +34,9 @@ def get(session, _id: str, repository_name: str):
|
||||
|
||||
|
||||
@rt(Routes.AddTable)
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries:str):
|
||||
logger.debug(f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.add_new_table(tab_id, form_id, repository_name, table_name, json.loads(tab_boundaries))
|
||||
|
||||
@@ -48,7 +49,8 @@ def put(session, _id: str, repository: str):
|
||||
|
||||
|
||||
@rt(Routes.ShowTable)
|
||||
def get(session, _id: str, repository: str, table: str, tab_boundaries:str):
|
||||
logger.debug(f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
def get(session, _id: str, repository: str, table: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.show_table(repository, table, json.loads(tab_boundaries))
|
||||
|
||||
@@ -5,7 +5,7 @@ from core.settings_management import SettingsManager
|
||||
|
||||
REPOSITORIES_SETTINGS_ENTRY = "Repositories"
|
||||
|
||||
logger = logging.getLogger("AddStuffSettings")
|
||||
logger = logging.getLogger("RepositoriesSettings")
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
function getTabContentBoundaries(tabsId) {
|
||||
const tabsContainer = document.getElementById(tabsId)
|
||||
console.debug("tabsContainer", tabsContainer)
|
||||
const contentDiv = tabsContainer.querySelector('.mmt-tabs-content')
|
||||
|
||||
const boundaries = contentDiv.getBoundingClientRect()
|
||||
|
||||
@@ -149,6 +149,17 @@ class MyTabs(BaseComponent):
|
||||
if active is not None:
|
||||
to_modify.active = active
|
||||
|
||||
def get_tab_content_by_key(self, key):
|
||||
return self.tabs_by_key[key].content if key in self.tabs_by_key else None
|
||||
|
||||
def show_tab(self, tab_key, updated_content=None):
|
||||
if updated_content:
|
||||
tab_id = self._get_tab_id_from_tab_key(tab_key)
|
||||
self.set_tab_content(tab_id, updated_content)
|
||||
|
||||
self.select_tab_by_key(tab_key)
|
||||
return self.refresh()
|
||||
|
||||
def refresh(self):
|
||||
return self.render(oob=True)
|
||||
|
||||
@@ -157,7 +168,7 @@ class MyTabs(BaseComponent):
|
||||
|
||||
def render(self, oob=False):
|
||||
active_content = self.get_active_tab_content()
|
||||
if hasattr(active_content, "on_htmx_after_settle"):
|
||||
if hasattr(active_content, "on_htmx_after_settle") and active_content.on_htmx_after_settle is not None:
|
||||
extra_params = {"hx-on::after-settle": active_content.on_htmx_after_settle()}
|
||||
else:
|
||||
extra_params = {}
|
||||
@@ -185,6 +196,13 @@ class MyTabs(BaseComponent):
|
||||
active_tab = next(filter(lambda t: t.active, self.tabs), None)
|
||||
return active_tab.content if active_tab else None
|
||||
|
||||
def get_active_tab_key(self):
|
||||
active_tab = next(filter(lambda t: t.active, self.tabs), None)
|
||||
return active_tab.key if active_tab else None
|
||||
|
||||
def _get_tab_id_from_tab_key(self, tab_key):
|
||||
return self.tabs_by_key[tab_key].id if tab_key in self.tabs_by_key else None
|
||||
|
||||
@staticmethod
|
||||
def create_component_id(session):
|
||||
prefix = f"{MY_TABS_INSTANCE_ID}{session['user_id']}"
|
||||
|
||||
23
src/components/undo_redo/UndoRedoApp.py
Normal file
23
src/components/undo_redo/UndoRedoApp.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import logging
|
||||
|
||||
from fasthtml.fastapp import fast_app
|
||||
|
||||
from components.undo_redo.constants import Routes
|
||||
from core.instance_manager import debug_session, InstanceManager
|
||||
|
||||
logger = logging.getLogger("UndoRedoApp")
|
||||
|
||||
undo_redo_app, rt = fast_app()
|
||||
|
||||
|
||||
@rt(Routes.Undo)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.Undo} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.undo()
|
||||
|
||||
@rt(Routes.Redo)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.Redo} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.redo()
|
||||
0
src/components/undo_redo/__init__.py
Normal file
0
src/components/undo_redo/__init__.py
Normal file
0
src/components/undo_redo/assets/__init__.py
Normal file
0
src/components/undo_redo/assets/__init__.py
Normal file
7
src/components/undo_redo/assets/icons.py
Normal file
7
src/components/undo_redo/assets/icons.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from fastcore.basics import NotStr
|
||||
|
||||
# carbon Undo
|
||||
icon_undo = NotStr("""<svg name="undo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 32 32"><path d="M20 10H7.815l3.587-3.586L10 5l-6 6l6 6l1.402-1.415L7.818 12H20a6 6 0 0 1 0 12h-8v2h8a8 8 0 0 0 0-16z" fill="currentColor"></path></svg>""")
|
||||
|
||||
# carbon Redo
|
||||
icon_redo = NotStr("""<svg name="redo" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 32 32"><path d="M12 10h12.185l-3.587-3.586L22 5l6 6l-6 6l-1.402-1.415L24.182 12H12a6 6 0 0 0 0 12h8v2h-8a8 8 0 0 1 0-16z" fill="currentColor"></path></svg>""")
|
||||
25
src/components/undo_redo/commands.py
Normal file
25
src/components/undo_redo/commands.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from components.BaseCommandManager import BaseCommandManager
|
||||
from components.undo_redo.constants import ROUTE_ROOT, Routes
|
||||
|
||||
|
||||
class UndoRedoCommandManager(BaseCommandManager):
|
||||
def __init__(self, owner):
|
||||
super().__init__(owner)
|
||||
|
||||
def undo(self):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.Undo}",
|
||||
"hx-trigger": "click, keyup[ctrlKey&&key=='z'] from:body",
|
||||
"hx-target": f"#{self._id}",
|
||||
"hx-swap": "innerHTML",
|
||||
"hx-vals": f'{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
def redo(self):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.Redo}",
|
||||
"hx_trigger": "click, keyup[ctrlKey&&key=='y'] from:body",
|
||||
"hx-target": f"#{self._id}",
|
||||
"hx-swap": "innerHTML",
|
||||
"hx-vals": f'{{"_id": "{self._id}"}}',
|
||||
}
|
||||
165
src/components/undo_redo/components/UndoRedo.py
Normal file
165
src/components/undo_redo/components/UndoRedo.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
|
||||
from fastcore.xml import FT
|
||||
from fasthtml.components import *
|
||||
|
||||
from components.BaseComponent import BaseComponentSingleton
|
||||
from components.undo_redo.assets.icons import icon_redo, icon_undo
|
||||
from components.undo_redo.commands import UndoRedoCommandManager
|
||||
from components.undo_redo.constants import UNDO_REDO_INSTANCE_ID, UndoRedoAttrs
|
||||
from components_helpers import mk_icon, mk_tooltip
|
||||
from core.settings_management import NoDefault
|
||||
|
||||
logger = logging.getLogger("UndoRedoApp")
|
||||
|
||||
|
||||
@dataclass
|
||||
class CommandHistory:
|
||||
attrs: UndoRedoAttrs
|
||||
tab_key: str | None
|
||||
digest: str | None # digest to remember
|
||||
entry: str # digest to remember
|
||||
key: str # key
|
||||
path: str # path within the key if only on subitem needs to be updated
|
||||
|
||||
|
||||
class UndoRedo(BaseComponentSingleton):
|
||||
COMPONENT_INSTANCE_ID = UNDO_REDO_INSTANCE_ID
|
||||
|
||||
def __init__(self, session, _id, settings_manager=None, tabs_manager=None):
|
||||
super().__init__(session, _id, settings_manager, tabs_manager)
|
||||
self.index = -1
|
||||
self.history = []
|
||||
self._commands = UndoRedoCommandManager(self)
|
||||
self._db_engine = settings_manager.get_db_engine()
|
||||
|
||||
def snapshot(self, undo_redo_attrs: UndoRedoAttrs, entry, key, path=None):
|
||||
digest = self._settings_manager.get_digest(self._session, entry) # get the current digest (the last one)
|
||||
active_tab_key = self.tabs_manager.get_active_tab_key()
|
||||
|
||||
# init the history if this is the first call
|
||||
if len(self.history) == 0:
|
||||
digest_history = self._settings_manager.history(self._session, entry, digest, 2)
|
||||
command = CommandHistory(undo_redo_attrs,
|
||||
active_tab_key,
|
||||
digest_history[1] if len(digest_history) > 1 else None,
|
||||
entry,
|
||||
key,
|
||||
path)
|
||||
self.history.append(command)
|
||||
self.index = 0
|
||||
|
||||
command = CommandHistory(undo_redo_attrs, active_tab_key, digest, entry, key, path)
|
||||
|
||||
self.history = self.history[:self.index + 1] #
|
||||
self.history.append(command)
|
||||
self.index = len(self.history) - 1
|
||||
|
||||
def undo(self):
|
||||
logger.debug(f"Undo command")
|
||||
if self.index < 1:
|
||||
logger.debug(f" No command to undo.")
|
||||
return self
|
||||
|
||||
current = self.history[self.index]
|
||||
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
|
||||
|
||||
previous = self.history[self.index - 1]
|
||||
previous_state = self._settings_manager.load(self._session, None, digest=previous.digest)
|
||||
|
||||
# reapply the state
|
||||
if previous_state is not NoDefault:
|
||||
current_state[current.key] = previous_state[current.key]
|
||||
else:
|
||||
del current_state[current.key]
|
||||
self._settings_manager.save(self._session, current.entry, current_state)
|
||||
|
||||
self.index -= 1
|
||||
|
||||
if current.attrs.on_undo is not None:
|
||||
ret = current.attrs.on_undo()
|
||||
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
|
||||
ret = self.tabs_manager.show_tab(current.tab_key)
|
||||
elif isinstance(ret, FT) and 'id' in ret.attrs:
|
||||
ret.attrs["hx-swap-oob"] = "true"
|
||||
return self, ret
|
||||
else:
|
||||
return self
|
||||
|
||||
def redo(self):
|
||||
logger.debug(f"Redo command")
|
||||
if self.index >= len(self.history) - 1:
|
||||
logger.debug(f" No command to undo.")
|
||||
return self
|
||||
|
||||
current = self.history[self.index]
|
||||
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
|
||||
|
||||
next_ = self.history[self.index + 1]
|
||||
next_state = self._settings_manager.load(self._session, None, digest=next_.digest)
|
||||
|
||||
# reapply the state
|
||||
if current_state is not NoDefault:
|
||||
current_state[current.key] = next_state[current.key]
|
||||
else:
|
||||
current_state = {current.key: next_state[current.key]}
|
||||
self._settings_manager.save(self._session, current.entry, current_state)
|
||||
|
||||
self.index += 1
|
||||
|
||||
if current.attrs.on_redo is not None:
|
||||
ret = current.attrs.on_undo()
|
||||
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
|
||||
ret = self.tabs_manager.show_tab(current.tab_key)
|
||||
elif isinstance(ret, FT) and 'id' in ret.attrs:
|
||||
ret.attrs["hx-swap-oob"] = "true"
|
||||
return self, ret
|
||||
else:
|
||||
return self
|
||||
|
||||
def refresh(self):
|
||||
return self.__ft__(oob=True)
|
||||
|
||||
def __ft__(self, oob=False):
|
||||
return Div(
|
||||
self._mk_undo(),
|
||||
self._mk_redo(),
|
||||
id=self._id,
|
||||
cls="flex",
|
||||
hx_swap_oob="true" if oob else None
|
||||
)
|
||||
|
||||
def _mk_undo(self):
|
||||
if self._can_undo():
|
||||
command = self.history[self.index]
|
||||
return mk_tooltip(mk_icon(icon_undo,
|
||||
size=24,
|
||||
**self._commands.undo()),
|
||||
f"Undo '{command.attrs.name}'.")
|
||||
else:
|
||||
return mk_tooltip(mk_icon(icon_undo,
|
||||
size=24,
|
||||
can_select=False,
|
||||
cls="mmt-btn-disabled"),
|
||||
"Nothing to undo.")
|
||||
|
||||
def _mk_redo(self):
|
||||
if self._can_redo():
|
||||
command = self.history[self.index + 1]
|
||||
return mk_tooltip(mk_icon(icon_redo,
|
||||
size=24,
|
||||
**self._commands.redo()),
|
||||
f"Redo '{command.attrs.name}'.")
|
||||
else:
|
||||
return mk_tooltip(mk_icon(icon_redo,
|
||||
size=24,
|
||||
can_select=False,
|
||||
cls="mmt-btn-disabled"),
|
||||
"Nothing to redo.")
|
||||
|
||||
def _can_undo(self):
|
||||
return self.index >= 1
|
||||
|
||||
def _can_redo(self):
|
||||
return self.index < len(self.history) - 1
|
||||
0
src/components/undo_redo/components/__init__.py
Normal file
0
src/components/undo_redo/components/__init__.py
Normal file
24
src/components/undo_redo/constants.py
Normal file
24
src/components/undo_redo/constants.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable
|
||||
|
||||
UNDO_REDO_INSTANCE_ID = "__UndoRedo__"
|
||||
|
||||
ROUTE_ROOT = "/undo"
|
||||
|
||||
|
||||
class Routes:
|
||||
Undo = "/undo"
|
||||
Redo = "/redo"
|
||||
|
||||
|
||||
@dataclass
|
||||
class UndoRedoAttrs:
|
||||
name: str
|
||||
desc: str = None
|
||||
update_tab: bool = True
|
||||
on_undo: Callable = None
|
||||
on_redo: Callable = None
|
||||
|
||||
def __post_init__(self):
|
||||
if self.on_redo is None:
|
||||
self.on_redo = self.on_undo
|
||||
24
src/components/workflows/Readme.md
Normal file
24
src/components/workflows/Readme.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# id
|
||||
|
||||
**Workflow Designer ids**:
|
||||
|
||||
using `_id={WORKFLOW_DESIGNER_INSTANCE_ID}{session['user_id']}{get_unique_id()}`
|
||||
|
||||
| Name | value |
|
||||
|----------------------------------|--------------------------------|
|
||||
| Canvas | `c_{self._id}` |
|
||||
| Designer | `d_{self._id}` |
|
||||
| Error Message | `err_{self._id}` |
|
||||
| Properties | `p_{self._id}` |
|
||||
| Properties Input Section | `pi_{self._id}` |
|
||||
| Properties Output Section | `po_{self._id}` |
|
||||
| Properties Properties Section | `pp_{self._id}` |
|
||||
| Properties Properties drag top | `ppt_{self._id}` |
|
||||
| Properties Properties drag left | `ppl_{self._id}` |
|
||||
| Properties Properties drag right | `ppr_{self._id}` |
|
||||
| Properties Properties content | `ppc_{self._id}` |
|
||||
| Spliter | `s_{self._id}` |
|
||||
| Top element | `t_{self._id}` |
|
||||
| Form for properties | `f_{self._id}_{component_id}` |
|
||||
| Form for output properties | `fo_{self._id}_{component_id}` |
|
||||
|
||||
156
src/components/workflows/WorkflowsApp.py
Normal file
156
src/components/workflows/WorkflowsApp.py
Normal file
@@ -0,0 +1,156 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fasthtml.fastapp import fast_app
|
||||
|
||||
from components.workflows.constants import Routes
|
||||
from core.instance_manager import InstanceManager, debug_session
|
||||
|
||||
logger = logging.getLogger("WorkflowsApp")
|
||||
|
||||
repositories_app, rt = fast_app()
|
||||
|
||||
|
||||
@rt(Routes.AddWorkflow)
|
||||
def get(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.AddWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.request_new_workflow()
|
||||
|
||||
|
||||
@rt(Routes.AddWorkflow)
|
||||
def post(session, _id: str, tab_id: str, form_id: str, name: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddWorkflow} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {name=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.add_new_workflow(tab_id, form_id, name, json.loads(tab_boundaries))
|
||||
|
||||
|
||||
@rt(Routes.ShowWorkflow)
|
||||
def post(session, _id: str, name: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddWorkflow} with args {debug_session(session)}, {_id=}, {name=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.show_workflow(name, json.loads(tab_boundaries))
|
||||
|
||||
|
||||
@rt(Routes.AddComponent)
|
||||
def post(session, _id: str, component_type: str, x: float, y: float):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddComponent} with args {debug_session(session)}, {_id=}, {component_type=}, {x=}, {y=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.add_component(component_type, x, y)
|
||||
|
||||
|
||||
@rt(Routes.MoveComponent)
|
||||
def post(session, _id: str, component_id: str, x: float, y: float):
|
||||
logger.debug(
|
||||
f"Entering {Routes.MoveComponent} with args {debug_session(session)}, {_id=}, {component_id=}, {x=}, {y=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.move_component(component_id, x, y)
|
||||
|
||||
|
||||
@rt(Routes.DeleteComponent)
|
||||
def post(session, _id: str, component_id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.DeleteComponent} with args {debug_session(session)}, {_id=}, {component_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.delete_component(component_id)
|
||||
|
||||
|
||||
@rt(Routes.AddConnection)
|
||||
def post(session, _id: str, from_id: str, to_id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddConnection} with args {debug_session(session)}, {_id=}, {from_id=}, {to_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.add_connection(from_id, to_id)
|
||||
|
||||
|
||||
@rt(Routes.DeleteConnection)
|
||||
def post(session, _id: str, from_id: str, to_id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.DeleteConnection} with args {debug_session(session)}, {_id=}, {from_id=}, {to_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.delete_connection(from_id, to_id)
|
||||
|
||||
|
||||
@rt(Routes.ResizeDesigner)
|
||||
def post(session, _id: str, designer_height: int):
|
||||
logger.debug(
|
||||
f"Entering {Routes.ResizeDesigner} with args {debug_session(session)}, {_id=}, {designer_height=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.set_designer_height(designer_height)
|
||||
|
||||
|
||||
@rt(Routes.UpdatePropertiesLayout)
|
||||
def post(session, _id: str, input_width: int, properties_width: int, output_width: int):
|
||||
logger.debug(
|
||||
f"Entering {Routes.UpdatePropertiesLayout} with args {debug_session(session)}, {_id=}, {input_width=}, {properties_width=}, {output_width=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.update_properties_layout(input_width, properties_width, output_width)
|
||||
|
||||
|
||||
@rt(Routes.SelectComponent)
|
||||
def post(session, _id: str, component_id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.SelectComponent} with args {debug_session(session)}, {_id=}, {component_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.select_component(component_id)
|
||||
|
||||
|
||||
@rt(Routes.SaveProperties)
|
||||
def post(session, _id: str, component_id: str, details: dict):
|
||||
logger.debug(
|
||||
f"Entering {Routes.SaveProperties} with args {debug_session(session)}, {_id=}, {component_id=}, {details=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
details.pop("_id")
|
||||
details.pop("component_id")
|
||||
return instance.save_properties(component_id, details)
|
||||
|
||||
|
||||
@rt(Routes.CancelProperties)
|
||||
def post(session, _id: str, component_id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.CancelProperties} with args {debug_session(session)}, {_id=}, {component_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.cancel_properties(component_id)
|
||||
|
||||
|
||||
@rt(Routes.SelectProcessor)
|
||||
def post(session, _id: str, component_id: str, processor_name: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.SelectProcessor} with args {debug_session(session)}, {_id=}, {component_id=}, {processor_name=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.set_selected_processor(component_id, processor_name)
|
||||
|
||||
|
||||
@rt(Routes.OnProcessorDetailsEvent)
|
||||
def post(session, _id: str, component_id: str, event_name: str, details: dict):
|
||||
logger.debug(
|
||||
f"Entering {Routes.OnProcessorDetailsEvent} with args {debug_session(session)}, {_id=}, {component_id=}, {event_name=}, {details=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
details.pop("_id")
|
||||
details.pop("component_id")
|
||||
details.pop("event_name")
|
||||
return instance.on_processor_details_event(component_id, event_name, details)
|
||||
|
||||
|
||||
@rt(Routes.PlayWorkflow)
|
||||
def post(session, _id: str, tab_boundaries: str):
|
||||
logger.debug(f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.play_workflow(json.loads(tab_boundaries))
|
||||
|
||||
|
||||
@rt(Routes.StopWorkflow)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.StopWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.stop_workflow()
|
||||
|
||||
|
||||
@rt(Routes.Refresh)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.Refresh} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.refresh()
|
||||
0
src/components/workflows/__init__.py
Normal file
0
src/components/workflows/__init__.py
Normal file
292
src/components/workflows/assets/Workflows.css
Normal file
292
src/components/workflows/assets/Workflows.css
Normal file
@@ -0,0 +1,292 @@
|
||||
.wkf-toolbox-item {
|
||||
cursor: grab;
|
||||
}
|
||||
|
||||
.wkf-toolbox-item:active {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
.wkf-splitter {
|
||||
cursor: row-resize;
|
||||
height: 1px;
|
||||
background-color: var(--color-splitter);
|
||||
margin: 4px 0;
|
||||
transition: background-color 0.2s;
|
||||
position: relative; /* Ensure the parent has position relative */
|
||||
|
||||
}
|
||||
|
||||
.wkf-splitter::after {
|
||||
--color-resize: var(--color-splitter);
|
||||
content: ''; /* This is required */
|
||||
position: absolute; /* Position as needed */
|
||||
z-index: 1;
|
||||
display: block; /* Makes it a block element */
|
||||
height: 6px;
|
||||
width: 20px;
|
||||
background-color: var(--color-splitter);
|
||||
|
||||
/* Center horizontally */
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
|
||||
/* Center vertically */
|
||||
top: 50%;
|
||||
margin-top: -3px; /* Half of the height */
|
||||
/* Alternatively: transform: translate(-50%, -50%); */
|
||||
}
|
||||
|
||||
|
||||
.wkf-splitter:hover, .wkf-splitter-active {
|
||||
background-color: var(--color-splitter-active);
|
||||
}
|
||||
|
||||
.wkf-designer {
|
||||
min-height: 230px;
|
||||
}
|
||||
|
||||
.wkf-properties {
|
||||
box-sizing: border-box;
|
||||
position: relative;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: var(--color-base-100); /* bg-base-100 */
|
||||
}
|
||||
|
||||
.wkf-properties-input, .wkf-properties-output {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: var(--color-base-100); /* bg-base-100 */
|
||||
overflow: auto;
|
||||
|
||||
}
|
||||
|
||||
.wkf-properties-input {
|
||||
border-width: 1px;
|
||||
border-top-left-radius: 0.5rem; /* rounded on left side */
|
||||
border-bottom-left-radius: 0.5rem;
|
||||
border-top-right-radius: 0; /* not rounded on right side */
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
|
||||
.wkf-properties-output {
|
||||
border-width: 1px;
|
||||
border-top-right-radius: 0.5rem; /* rounded on right side */
|
||||
border-bottom-right-radius: 0.5rem;
|
||||
border-top-left-radius: 0; /* not rounded on left side */
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
|
||||
.wkf-properties-properties {
|
||||
vertical-align: top;
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-left {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
width: 5px;
|
||||
height: 100%;
|
||||
cursor: ew-resize;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-right {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
top: 0;
|
||||
width: 5px;
|
||||
height: 100%;
|
||||
cursor: ew-resize;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.wkf-properties-top {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
cursor: move;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-top {
|
||||
background-image: radial-gradient(var(--color-splitter) 40%, transparent 0);
|
||||
background-repeat: repeat;
|
||||
background-size: 4px 4px;
|
||||
cursor: move;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
height: 8px;
|
||||
width: 20px;
|
||||
position: relative;
|
||||
top: 1px;
|
||||
}
|
||||
|
||||
.wkf-properties-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%; /* or inherit from a fixed-height parent */
|
||||
}
|
||||
|
||||
.wkf-properties-content-header {
|
||||
flex-shrink: 0; /* optional: prevent it from shrinking */
|
||||
}
|
||||
|
||||
.wkf-properties-content-form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-grow: 1;
|
||||
overflow: hidden; /* prevent double scrollbars if needed */
|
||||
}
|
||||
|
||||
|
||||
.wkf-canvas {
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
background-image:
|
||||
linear-gradient(rgba(0,0,0,.1) 1px, transparent 1px),
|
||||
linear-gradient(90deg, rgba(0,0,0,.1) 1px, transparent 1px);
|
||||
background-size: 20px 20px;
|
||||
}
|
||||
|
||||
.wkf-canvas-error {
|
||||
border: 3px solid var(--color-error);
|
||||
}
|
||||
|
||||
.wkf-toolbox {
|
||||
min-height: 230px;
|
||||
width: 8rem; /* w-32 (32 * 0.25rem = 8rem) */
|
||||
padding: 0.5rem; /* p-2 */
|
||||
background-color: var(--color-base-100); /* bg-base-100 */
|
||||
border-radius: 0.5rem; /* rounded-lg */
|
||||
border-width: 1px; /* border */
|
||||
}
|
||||
|
||||
|
||||
.wkf-workflow-component {
|
||||
position: absolute;
|
||||
cursor: move;
|
||||
border: 2px solid transparent;
|
||||
transition: all 0.2s;
|
||||
height: 64px;
|
||||
}
|
||||
|
||||
.wkf-workflow-component:hover {
|
||||
border-color: #3b82f6;
|
||||
transform: scale(1.02);
|
||||
}
|
||||
|
||||
.wkf-workflow-component.selected {
|
||||
border-color: #ef4444;
|
||||
box-shadow: 0 0 10px rgba(239, 68, 68, 0.3);
|
||||
}
|
||||
|
||||
.wkf-workflow-component.dragging {
|
||||
transition: none;
|
||||
}
|
||||
|
||||
.wkf-workflow-component.error {
|
||||
background: var(--color-error);
|
||||
}
|
||||
|
||||
|
||||
.wkf-component-content {
|
||||
padding: 0.75rem; /* p-3 in Tailwind */
|
||||
border-radius: 0.5rem; /* rounded-lg in Tailwind */
|
||||
border-width: 2px; /* border-2 in Tailwind */
|
||||
background-color: white; /* bg-white in Tailwind */
|
||||
box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05); /* shadow-lg in Tailwind */
|
||||
display: flex; /* flex in Tailwind */
|
||||
align-items: center; /* items-center in Tailwind */
|
||||
}
|
||||
|
||||
.wkf-component-content.error {
|
||||
background: var(--color-error);
|
||||
}
|
||||
|
||||
.wkf-component-content.not-run {
|
||||
}
|
||||
|
||||
.wkf-connection-line {
|
||||
position: absolute;
|
||||
pointer-events: none;
|
||||
z-index: 1;
|
||||
}
|
||||
|
||||
.wkf-connection-point {
|
||||
position: absolute;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
background: #3b82f6;
|
||||
border-radius: 50%;
|
||||
cursor: crosshair;
|
||||
border: 2px solid white;
|
||||
box-shadow: 0 2px 4px rgba(0,0,0,0.2);
|
||||
transition: background-color 0.2s, transform 0.2s;
|
||||
}
|
||||
|
||||
.wkf-connection-point.potential-connection {
|
||||
box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.5);
|
||||
animation: pulse 0.7s infinite;
|
||||
}
|
||||
|
||||
.wkf-connection-point.potential-start {
|
||||
background: #ef4444;
|
||||
}
|
||||
|
||||
.wkf-output-point {
|
||||
right: -6px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
|
||||
.wkf-input-point {
|
||||
left: -6px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
}
|
||||
|
||||
.wkf-connection-point:hover {
|
||||
background: #ef4444;
|
||||
transform: translateY(-50%) scale(1.2);
|
||||
}
|
||||
@keyframes pulse {
|
||||
0% { box-shadow: 0 0 0 0 rgba(59, 130, 246, 0.7); }
|
||||
70% { box-shadow: 0 0 0 6px rgba(59, 130, 246, 0); }
|
||||
100% { box-shadow: 0 0 0 0 rgba(59, 130, 246, 0); }
|
||||
}
|
||||
|
||||
.wkf-connection-path {
|
||||
stroke: #3b82f6;
|
||||
stroke-width: 2;
|
||||
fill: none;
|
||||
cursor: pointer;
|
||||
pointer-events: none;
|
||||
transition: stroke 0.2s ease, stroke-width 0.2s ease;
|
||||
}
|
||||
|
||||
.wkf-connection-path-thick {
|
||||
stroke: transparent;
|
||||
stroke-width: 10;
|
||||
fill: none;
|
||||
cursor: pointer;
|
||||
pointer-events: stroke;
|
||||
}
|
||||
|
||||
.wkf-connection-path-arrowhead {
|
||||
fill:#3b82f6;
|
||||
}
|
||||
|
||||
.wkf-connection-selected {
|
||||
stroke: #ef4444 !important;
|
||||
}
|
||||
|
||||
.wkf-connection-path-arrowhead-selected {
|
||||
fill:#ef4444 !important;;
|
||||
}
|
||||
765
src/components/workflows/assets/Workflows.js
Normal file
765
src/components/workflows/assets/Workflows.js
Normal file
@@ -0,0 +1,765 @@
|
||||
function bindWorkflowDesigner(elementId) {
|
||||
bindWorkflowDesignerToolbox(elementId)
|
||||
bindWorkflowDesignerSplitter(elementId)
|
||||
bindWorkflowProperties(elementId)
|
||||
}
|
||||
|
||||
/**
 * Binds the workflow designer toolbox and canvas: drag & drop of components,
 * connection drawing/selection and keyboard shortcuts.
 *
 * Fixes vs previous revision:
 *  - onClick now returns after handling a connection-point click, so the click
 *    no longer falls through to component selection (the point sits inside a
 *    component, so `closest('.wkf-workflow-component')` would match too).
 *  - checkPotentialConnections compared a DOM element against a stored
 *    component-id string (always unequal); it now compares ids.
 *
 * @param {string} elementId - Base ID of the workflow designer element.
 * @returns {object|null} Public API (destroy/getState/updateConnections/selectComponent),
 *                        or null when the required DOM elements are missing.
 */
function bindWorkflowDesignerToolbox(elementId) {
    // Constants for configuration
    const CONFIG = {
        COMPONENT_WIDTH: 128,
        COMPONENT_HEIGHT: 64,
        DRAG_OFFSET: { x: 64, y: 40 },
        CONNECTION_POINT_RADIUS: 6,
        // 1x1 transparent GIF used to hide the browser's native drag ghost.
        INVISIBLE_DRAG_IMAGE: 'data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7',
    };

    // Designer state, grouped by concern.
    const designer = {
        // Drag state
        draggedType: null,
        draggedComponent: null,

        // Selection state
        selectedComponent: null,
        selectedConnection: null,

        // Connection state
        connectionStart: null,
        potentialConnectionStart: null,

        // Performance optimization
        lastUpdateTime: 0,
        animationFrame: null,

        // Cleanup tracking: key -> removeEventListener closure
        eventListeners: new Map(),

        // Reset transient drag/connection state (selection is kept on purpose;
        // callers clear it explicitly via the managers' deselectAll()).
        reset() {
            this.draggedType = null;
            this.draggedComponent = null;
            this.connectionStart = null;
            this.potentialConnectionStart = null;
            this.cancelAnimationFrame();
        },

        cancelAnimationFrame() {
            if (this.animationFrame) {
                cancelAnimationFrame(this.animationFrame);
                this.animationFrame = null;
            }
        }
    };

    // Get DOM elements with error handling
    const designerContainer = document.getElementById(elementId);
    const canvas = document.getElementById(`c_${elementId}`);

    if (!designerContainer || !canvas) {
        console.error(`Workflow designer elements not found for ID: ${elementId}`);
        return null;
    }

    // Utility functions
    const utils = {

        // True when the circle touches/overlaps the rectangle (viewport coords).
        isOverlapping(rect1, circle) {
            // Find the closest point on the rectangle to the circle's center
            const closestX = Math.max(rect1.x, Math.min(circle.x, rect1.x + rect1.width));
            const closestY = Math.max(rect1.y, Math.min(circle.y, rect1.y + rect1.height));

            // Calculate the distance between the circle's center and the closest point
            const deltaX = circle.x - closestX;
            const deltaY = circle.y - closestY;
            const distanceSquared = deltaX * deltaX + deltaY * deltaY;

            // Compare squared distances to avoid a sqrt
            return distanceSquared <= circle.radius * circle.radius;
        },

        // Get mouse position relative to canvas
        getCanvasPosition(event) {
            const rect = canvas.getBoundingClientRect();
            return {
                x: event.clientX - rect.left,
                y: event.clientY - rect.top
            };
        },

        // Constrain a component's top-left corner within canvas bounds
        constrainPosition(x, y) {
            const canvasRect = canvas.getBoundingClientRect();
            return {
                x: Math.max(0, Math.min(x, canvasRect.width - CONFIG.COMPONENT_WIDTH)),
                y: Math.max(0, Math.min(y, canvasRect.height - CONFIG.COMPONENT_HEIGHT))
            };
        },

        // POST an HTMX request carrying this designer's _id; rethrows on failure.
        async makeRequest(url, values, targetId = `#c_${elementId}`, swap = "innerHTML") {
            try {
                return htmx.ajax('POST', url, {
                    target: targetId,
                    headers: { "Content-Type": "application/x-www-form-urlencoded" },
                    swap: swap,
                    values: { _id: elementId, ...values }
                });
            } catch (error) {
                console.error('HTMX request failed:', error);
                throw error;
            }
        }
    };

    // Connection (SVG line) management
    const connectionManager = {
        // Redraw all connections, coalesced into a single animation frame.
        updateAll() {
            designer.cancelAnimationFrame();
            designer.animationFrame = requestAnimationFrame(() => {
                const connectionLines = designerContainer.querySelectorAll('.wkf-connection-line');
                connectionLines.forEach(svg => {
                    const { fromId, toId } = svg.dataset;
                    if (fromId && toId) {
                        this.updateLine(svg, fromId, toId);
                    }
                });
            });
        },

        // Recompute one connection path from its endpoint components' positions.
        updateLine(svg, fromId, toId) {
            const fromComp = designerContainer.querySelector(`[data-component-id="${fromId}"]`);
            const toComp = designerContainer.querySelector(`[data-component-id="${toId}"]`);

            if (!fromComp || !toComp) return;

            // Output point: middle of the right edge; input point: middle of the left edge.
            const fromX = parseInt(fromComp.style.left) + CONFIG.COMPONENT_WIDTH;
            const fromY = parseInt(fromComp.style.top) + CONFIG.COMPONENT_HEIGHT / 2;
            const toX = parseInt(toComp.style.left);
            const toY = parseInt(toComp.style.top) + CONFIG.COMPONENT_HEIGHT / 2;

            // Create smooth curved (cubic Bezier) path
            const midX = (fromX + toX) / 2;
            const path = `M ${fromX} ${fromY} C ${midX} ${fromY}, ${midX} ${toY}, ${toX} ${toY}`;

            const pathElement = svg.querySelector('.wkf-connection-path');
            if (pathElement) {
                pathElement.setAttribute('d', path);
            }
        },

        // Remove all connection-point highlighting and restore the default color.
        clearHighlighting() {
            designerContainer.querySelectorAll('.wkf-connection-point').forEach(point => {
                point.classList.remove('potential-connection', 'potential-start');
                point.style.background = '#3b82f6';
            });
        },

        // Mark one connection as selected (and remember its endpoints).
        select(connectionPath) {
            // Deselect all other connections
            designerContainer.querySelectorAll('.wkf-connection-line path').forEach(path => {
                path.classList.remove('wkf-connection-selected');
            });

            connectionPath.classList.add('wkf-connection-selected');

            const connectionSvg = connectionPath.closest('.wkf-connection-line');
            designer.selectedConnection = {
                fromId: connectionSvg.dataset.fromId,
                toId: connectionSvg.dataset.toId
            };
        },

        // Deselect all connections
        deselectAll() {
            designerContainer.querySelectorAll('.wkf-connection-line path').forEach(path => {
                path.classList.remove('wkf-connection-selected');
            });
            designer.selectedConnection = null;
        }
    };

    // Component management
    const componentManager = {
        // Select a component locally and notify the server.
        select(component) {
            // Deselect all other components
            designerContainer.querySelectorAll('.wkf-workflow-component').forEach(comp => {
                comp.classList.remove('selected');
            });

            component.classList.add('selected');
            designer.selectedComponent = component.dataset.componentId;

            // Server-side selection refreshes the properties panel (fire and forget).
            utils.makeRequest('/workflows/select-component', {
                component_id: designer.selectedComponent
            }, `#ppc_${elementId}`, "outerHTML");
        },

        // Deselect all components
        deselectAll() {
            designerContainer.querySelectorAll('.wkf-workflow-component').forEach(comp => {
                comp.classList.remove('selected');
            });
            designer.selectedComponent = null;
        },

        // Move a component, clamped to the canvas bounds.
        updatePosition(component, x, y) {
            const constrained = utils.constrainPosition(x, y);
            component.style.left = constrained.x + 'px';
            component.style.top = constrained.y + 'px';
        }
    };

    // Event handlers (delegated on the container / canvas)
    const eventHandlers = {
        // Handle drag start for both toolbox items and existing components
        onDragStart(event) {
            const toolboxItem = event.target.closest('.wkf-toolbox-item');
            const component = event.target.closest('.wkf-workflow-component');

            if (toolboxItem) {
                designer.draggedType = toolboxItem.dataset.type;
                event.dataTransfer.effectAllowed = 'copy';
                return;
            }

            if (component) {
                component.classList.add('dragging');
                designer.draggedComponent = component.dataset.componentId;
                event.dataTransfer.effectAllowed = 'move';

                // Hide the native drag ghost; the component itself follows the mouse.
                const invisibleImg = new Image();
                invisibleImg.src = CONFIG.INVISIBLE_DRAG_IMAGE;
                event.dataTransfer.setDragImage(invisibleImg, 0, 0);

                // Highlight output points of OTHER components as potential targets.
                designerContainer.querySelectorAll('.wkf-connection-point').forEach(point => {
                    if (point.dataset.pointType === 'output' &&
                        point.dataset.componentId !== designer.draggedComponent) {
                        point.classList.add('potential-connection');
                    }
                });
            }
        },

        // Handle drag with immediate position updates for a responsive feel
        onDrag(event) {
            if (!event.target.closest('.wkf-workflow-component')) return;
            // (0,0) events are fired by some browsers at drag end; ignore them.
            if (event.clientX === 0 && event.clientY === 0) return;

            const component = event.target.closest('.wkf-workflow-component');
            const position = utils.getCanvasPosition(event);
            const x = position.x - CONFIG.DRAG_OFFSET.x;
            const y = position.y - CONFIG.DRAG_OFFSET.y;

            componentManager.updatePosition(component, x, y);

            // Check for potential connections while hovering output points.
            eventHandlers.checkPotentialConnections(component);

            // Repaint connections on the next animation frame.
            connectionManager.updateAll();
        },

        // While dragging, remember the last output point the component overlapped.
        // NOTE(review): the "potential start" is never cleared when the component
        // moves away from every point — presumably intentional snap memory; confirm.
        checkPotentialConnections(component) {
            const componentRect = component.getBoundingClientRect();
            const componentId = component.dataset.componentId;
            const outputPoints = designerContainer.querySelectorAll('.wkf-connection-point[data-point-type="output"]');

            outputPoints.forEach(point => {
                if (point.dataset.componentId === componentId) return;

                const pointRect = point.getBoundingClientRect();
                const pointCircle = {
                    x: pointRect.left + pointRect.width / 2,
                    y: pointRect.top + pointRect.height / 2,
                    radius: CONFIG.CONNECTION_POINT_RADIUS
                };

                // FIX: compare ids (string vs string); the old code compared the DOM
                // element itself against the stored id, which was always unequal.
                if (point.dataset.componentId !== designer.potentialConnectionStart &&
                    utils.isOverlapping(componentRect, pointCircle)) {

                    // Clear previous potential starts
                    outputPoints.forEach(otherPoint => {
                        otherPoint.classList.remove('potential-start');
                    });

                    designer.potentialConnectionStart = point.dataset.componentId;
                    point.classList.add('potential-start');
                }
            });
        },

        // Persist position (and possibly a new connection) at the end of a drag.
        async onDragEnd(event) {
            if (!event.target.closest('.wkf-workflow-component')) return;

            if (designer.draggedComponent) {
                const component = event.target.closest('.wkf-workflow-component');
                const draggedComponentId = component.dataset.componentId;

                component.classList.remove('dragging');

                const position = utils.getCanvasPosition(event);
                const x = position.x - CONFIG.DRAG_OFFSET.x;
                const y = position.y - CONFIG.DRAG_OFFSET.y;
                const constrained = utils.constrainPosition(x, y);

                try {
                    // Move component
                    await utils.makeRequest('/workflows/move-component', {
                        component_id: designer.draggedComponent,
                        x: constrained.x,
                        y: constrained.y
                    });

                    // Create connection if the drag ended over an output point
                    if (designer.potentialConnectionStart) {
                        await utils.makeRequest('/workflows/add-connection', {
                            from_id: designer.potentialConnectionStart,
                            to_id: draggedComponentId
                        });
                    }
                } catch (error) {
                    console.error('Failed to update component:', error);
                }

                // Cleanup
                connectionManager.clearHighlighting();
                designer.reset();
                connectionManager.updateAll();
            }
        },

        // Delegated click handling: connection points, connections, canvas, components.
        onClick(event) {
            // Connection point handling
            const connectionPoint = event.target.closest('.wkf-connection-point');
            if (connectionPoint) {
                event.stopPropagation();
                eventHandlers.handleConnectionPoint(connectionPoint);
                // FIX: stop here — the point is inside a component, so falling
                // through would also trigger component selection below.
                return;
            }

            // Connection selection
            const connectionPath = event.target.closest('.wkf-connection-line path');
            if (connectionPath) {
                event.stopPropagation();
                componentManager.deselectAll();
                // The click may land on an invisible hit-area path; select the visible one.
                const visibleConnectionPath = connectionPath.parentElement.querySelector('.wkf-connection-path');
                connectionManager.select(visibleConnectionPath);
                return;
            }

            // Canvas click - reset everything
            if (event.target === canvas || event.target.classList.contains('wkf-canvas')) {
                designer.reset();
                connectionManager.clearHighlighting();
                connectionManager.deselectAll();
                componentManager.deselectAll();
                return;
            }

            // Component selection
            const component = event.target.closest('.wkf-workflow-component');
            if (component) {
                event.stopPropagation();
                connectionManager.deselectAll();
                componentManager.select(component);
                return;
            }
        },

        // Two-click connection flow: first click on an output point arms the
        // connection, second click on another component's input point commits it.
        async handleConnectionPoint(connectionPoint) {
            const componentId = connectionPoint.dataset.componentId;
            const pointType = connectionPoint.dataset.pointType;

            if (!designer.connectionStart) {
                // Start connection from output point
                if (pointType === 'output') {
                    designer.connectionStart = { componentId, pointType };
                    connectionPoint.style.background = '#ef4444';
                }
            } else {
                // Complete connection to input point (self-connections rejected)
                if (pointType === 'input' && componentId !== designer.connectionStart.componentId) {
                    try {
                        await utils.makeRequest('/workflows/add-connection', {
                            from_id: designer.connectionStart.componentId,
                            to_id: componentId
                        });
                    } catch (error) {
                        console.error('Failed to create connection:', error);
                    }
                }

                // Reset connection mode
                connectionManager.clearHighlighting();
                designer.connectionStart = null;
            }
        },

        // Handle canvas drop: create a new component from a toolbox item.
        async onCanvasDrop(event) {
            event.preventDefault();

            if (designer.draggedType) {
                const position = utils.getCanvasPosition(event);
                const x = position.x - CONFIG.DRAG_OFFSET.x;
                const y = position.y - CONFIG.DRAG_OFFSET.y;
                const constrained = utils.constrainPosition(x, y);

                try {
                    await utils.makeRequest('/workflows/add-component', {
                        component_type: designer.draggedType,
                        x: constrained.x,
                        y: constrained.y
                    });
                } catch (error) {
                    console.error('Failed to add component:', error);
                }

                designer.draggedType = null;
            }
        },

        // Keyboard shortcuts: Delete removes the current selection.
        // NOTE(review): 'Suppr' is not a standard KeyboardEvent.key value —
        // the French Delete key also reports 'Delete'; kept for safety.
        async onKeyDown(event) {
            if (event.key === 'Delete' || event.key === 'Suppr') {
                try {
                    if (designer.selectedComponent) {
                        await utils.makeRequest('/workflows/delete-component', {
                            component_id: designer.selectedComponent
                        });
                        designer.selectedComponent = null;
                    } else if (designer.selectedConnection) {
                        await utils.makeRequest('/workflows/delete-connection', {
                            from_id: designer.selectedConnection.fromId,
                            to_id: designer.selectedConnection.toId
                        });
                        designer.selectedConnection = null;
                    }
                } catch (error) {
                    console.error('Failed to delete:', error);
                }
            }
        }
    };

    // Register a listener and remember how to remove it (for destroy()).
    function registerEventListener(element, event, handler, options = {}) {
        const key = `${element.id || 'global'}-${event}`;
        element.addEventListener(event, handler, options);
        designer.eventListeners.set(key, () => element.removeEventListener(event, handler, options));
    }

    // Register all event listeners
    registerEventListener(designerContainer, 'dragstart', eventHandlers.onDragStart);
    registerEventListener(designerContainer, 'drag', eventHandlers.onDrag);
    registerEventListener(designerContainer, 'dragend', eventHandlers.onDragEnd);
    registerEventListener(designerContainer, 'click', eventHandlers.onClick);
    registerEventListener(canvas, 'dragover', (event) => {
        event.preventDefault();
        event.dataTransfer.dropEffect = 'copy';
    });
    registerEventListener(canvas, 'drop', eventHandlers.onCanvasDrop);
    registerEventListener(document, 'keydown', eventHandlers.onKeyDown);

    // Public API
    const api = {
        // Cleanup function for proper disposal
        destroy() {
            designer.cancelAnimationFrame();
            designer.eventListeners.forEach(cleanup => cleanup());
            designer.eventListeners.clear();
        },

        // Get current designer state
        getState() {
            return {
                selectedComponent: designer.selectedComponent,
                selectedConnection: designer.selectedConnection,
                connectionStart: designer.connectionStart
            };
        },

        // Force update all connections
        updateConnections() {
            connectionManager.updateAll();
        },

        // Select component programmatically
        selectComponent(componentId) {
            const component = designerContainer.querySelector(`[data-component-id="${componentId}"]`);
            if (component) {
                componentManager.select(component);
            }
        }
    };

    // Initialize connections once the DOM has settled after the swap.
    setTimeout(() => connectionManager.updateAll(), 100);

    return api;
}
|
||||
|
||||
/**
 * Binds drag resize functionality to a workflow designer splitter.
 * Dragging the splitter resizes the designer (and, when present, the
 * properties panel below it); the final height is persisted server-side.
 * @param {string} elementId - The base ID of the workflow designer element
 */
function bindWorkflowDesignerSplitter(elementId) {
    // Get the elements
    const designer = document.getElementById(`d_${elementId}`);
    const splitter = document.getElementById(`s_${elementId}`);
    const properties = document.getElementById(`p_${elementId}`); // optional panel

    // FIX: guard BEFORE touching designer.style — the old code read
    // designer.style.minHeight first, so a missing element threw before
    // this check could ever run.
    if (!designer || !splitter) {
        console.error("Cannot find all required elements for workflow designer splitter");
        return;
    }

    // Minimum designer height; falls back to 230px when no min-height is set.
    const designerMinHeight = parseInt(designer.style.minHeight, 10) || 230;

    // Initialize drag state
    let isResizing = false;
    let startY = 0;
    let startDesignerHeight = 0;

    // Mouse down event - start dragging
    splitter.addEventListener('mousedown', (e) => {
        e.preventDefault();
        isResizing = true;
        startY = e.clientY;
        startDesignerHeight = parseInt(designer.style.height, 10) || designer.parentNode.getBoundingClientRect().height;

        document.body.style.userSelect = 'none'; // Disable text selection
        document.body.style.cursor = "row-resize"; // Change cursor style globally for horizontal splitter
        splitter.classList.add('wkf-splitter-active'); // Add class for visual feedback
    });

    // Mouse move event - update heights while dragging
    document.addEventListener('mousemove', (e) => {
        if (!isResizing) return;

        // Calculate new height
        const deltaY = e.clientY - startY;
        const newDesignerHeight = Math.max(designerMinHeight, startDesignerHeight + deltaY); // Enforce minimum height
        designer.style.height = `${newDesignerHeight}px`;

        // Update properties panel height if it exists
        if (properties) {
            const containerHeight = designer.parentNode.getBoundingClientRect().height;
            const propertiesHeight = Math.max(50, containerHeight - newDesignerHeight - splitter.offsetHeight);
            properties.style.height = `${propertiesHeight}px`;
        }
    });

    // Mouse up event - stop dragging and persist the new height
    document.addEventListener('mouseup', () => {
        if (!isResizing) return;

        isResizing = false;
        document.body.style.cursor = ""; // Reset cursor
        document.body.style.userSelect = ""; // Re-enable text selection
        splitter.classList.remove('wkf-splitter-active');

        // Store the current state
        const designerHeight = parseInt(designer.style.height, 10);
        saveDesignerHeight(elementId, designerHeight);
    });

    // Handle case when mouse leaves the window: cancel without persisting
    document.addEventListener('mouseleave', () => {
        if (isResizing) {
            isResizing = false;
            document.body.style.cursor = ""; // Reset cursor
            document.body.style.userSelect = ""; // Re-enable text selection
            splitter.classList.remove('wkf-splitter-active');
        }
    });

    // Persist the designer height server-side and swap in the refreshed markup.
    // (Now actually uses its parameters; it previously ignored them in favor of
    // the closed-over elementId — behavior is identical at the single call site.)
    function saveDesignerHeight(id, height) {
        htmx.ajax('POST', '/workflows/resize-designer', {
            target: `#${id}`,
            headers: {"Content-Type": "application/x-www-form-urlencoded"},
            swap: "outerHTML",
            values: {
                _id: id,
                designer_height: height,
            }
        });
    }
}
|
||||
|
||||
/**
 * Binds the workflow properties panel: dragging the titlebar shifts the panel
 * between the input and output sections; the left/right handles resize it.
 * The final layout is persisted server-side on mouseup.
 *
 * Fixes vs previous revision:
 *  - `properties_component` was assigned without declaration (implicit global,
 *    ReferenceError in strict mode) — now a proper const.
 *  - Garbled error-message quoting repaired.
 *  - The three identical width-capture literals are factored into a helper.
 *
 * @param {string} elementId - The base ID of the workflow designer element
 */
function bindWorkflowProperties(elementId) {
    let isDragging = false;
    let isResizing = false;
    let startX = 0;
    let startWidths = {};
    let resizeType = '';

    console.debug("Binding Properties component for "+ elementId)
    const propertiesComponent = document.getElementById(`p_${elementId}`);
    if (propertiesComponent == null) {
        console.error(`Component 'p_${elementId}' is not found!`)
        return
    }
    const totalWidth = propertiesComponent.getBoundingClientRect().width
    console.debug("totalWidth", totalWidth)

    const minPropertiesWidth = 352; // this value avoid scroll bars

    const inputSection = document.getElementById(`pi_${elementId}`);
    const propertiesSection = document.getElementById(`pp_${elementId}`);
    const outputSection = document.getElementById(`po_${elementId}`);
    const dragHandle = document.getElementById(`ppt_${elementId}`);
    const leftHandle = document.getElementById(`ppl_${elementId}`);
    const rightHandle = document.getElementById(`ppr_${elementId}`);

    // Snapshot the three section widths at the start of a drag/resize gesture.
    function captureStartWidths() {
        return {
            input: parseInt(inputSection.style.width),
            properties: parseInt(propertiesSection.style.width),
            output: parseInt(outputSection.style.width)
        };
    }

    // Common mousedown setup for all three gesture sources.
    function beginGesture(e) {
        startX = e.clientX;
        startWidths = captureStartWidths();
        e.preventDefault();
    }

    // Drag and drop for moving properties section
    dragHandle.addEventListener('mousedown', (e) => {
        isDragging = true;
        beginGesture(e);
    });

    // Left resize handle
    leftHandle.addEventListener('mousedown', (e) => {
        isResizing = true;
        resizeType = 'left';
        beginGesture(e);
    });

    // Right resize handle
    rightHandle.addEventListener('mousedown', (e) => {
        isResizing = true;
        resizeType = 'right';
        beginGesture(e);
    });

    // Mouse move: apply the gesture with min/max constraints
    document.addEventListener('mousemove', (e) => {
        if (isDragging) {
            // Moving: input grows while output shrinks (or vice versa),
            // the properties section keeps its width.
            const deltaX = e.clientX - startX;
            let newInputWidth = startWidths.input + deltaX;
            let newOutputWidth = startWidths.output - deltaX;

            // Constraints
            if (newInputWidth < 0) {
                newInputWidth = 0;
                newOutputWidth = totalWidth - startWidths.properties;
            }
            if (newOutputWidth < 0) {
                newOutputWidth = 0;
                newInputWidth = totalWidth - startWidths.properties;
            }

            inputSection.style.width = newInputWidth + 'px';
            outputSection.style.width = newOutputWidth + 'px';
        }

        if (isResizing) {
            const deltaX = e.clientX - startX;
            let newInputWidth = startWidths.input;
            let newPropertiesWidth = startWidths.properties;
            let newOutputWidth = startWidths.output;

            if (resizeType === 'left') {
                // Left handle trades width between input and properties.
                newInputWidth = startWidths.input + deltaX;
                newPropertiesWidth = startWidths.properties - deltaX;

                if (newInputWidth < 0) {
                    newInputWidth = 0;
                    newPropertiesWidth = startWidths.input + startWidths.properties;
                }
                if (newPropertiesWidth < minPropertiesWidth) {
                    newPropertiesWidth = minPropertiesWidth;
                    newInputWidth = totalWidth - minPropertiesWidth - startWidths.output;
                }
            } else if (resizeType === 'right') {
                // Right handle trades width between properties and output.
                newPropertiesWidth = startWidths.properties + deltaX;
                newOutputWidth = startWidths.output - deltaX;

                if (newOutputWidth < 0) {
                    newOutputWidth = 0;
                    newPropertiesWidth = startWidths.properties + startWidths.output;
                }
                if (newPropertiesWidth < minPropertiesWidth) {
                    newPropertiesWidth = minPropertiesWidth;
                    newOutputWidth = totalWidth - startWidths.input - minPropertiesWidth;
                }
            }

            inputSection.style.width = newInputWidth + 'px';
            propertiesSection.style.width = newPropertiesWidth + 'px';
            outputSection.style.width = newOutputWidth + 'px';
        }
    });

    // Mouse up: persist the new layout server-side
    document.addEventListener('mouseup', () => {
        if (isDragging || isResizing) {
            // Send HTMX request with new dimensions
            const currentWidths = {
                input_width: parseInt(inputSection.style.width),
                properties_width: parseInt(propertiesSection.style.width),
                output_width: parseInt(outputSection.style.width)
            };

            try {
                htmx.ajax('POST', '/workflows/update-properties-layout', {
                    target: `#${elementId}`,
                    headers: { "Content-Type": "application/x-www-form-urlencoded" },
                    swap: "outerHTML",
                    values: { _id: elementId, ...currentWidths }
                });
            } catch (error) {
                console.error('HTMX request failed:', error);
                throw error;
            }

            isDragging = false;
            isResizing = false;
            resizeType = '';
        }
    });
}
|
||||
0
src/components/workflows/assets/__init__.py
Normal file
0
src/components/workflows/assets/__init__.py
Normal file
28
src/components/workflows/assets/icons.py
Normal file
28
src/components/workflows/assets/icons.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from fastcore.basics import NotStr

# Inline SVG icons for the workflow toolbar, taken from Microsoft's Fluent
# icon set (20px variants). Each is wrapped in NotStr so FastHTML renders the
# markup verbatim instead of escaping it; `fill="currentColor"` lets the icons
# inherit the surrounding text color.

# Fluent Play20Filled
icon_play = NotStr(
    """<svg name="play" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M17.222 8.685a1.5 1.5 0 0 1 0 2.628l-10 5.498A1.5 1.5 0 0 1 5 15.496V4.502a1.5 1.5 0 0 1 2.223-1.314l10 5.497z" fill="currentColor"></path></g></svg>""")

# Fluent Pause20Filled
icon_pause = NotStr(
    """<svg name="pause" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M12 3.5A1.5 1.5 0 0 1 13.5 2h2A1.5 1.5 0 0 1 17 3.5v13a1.5 1.5 0 0 1-1.5 1.5h-2a1.5 1.5 0 0 1-1.5-1.5v-13zm-9 0A1.5 1.5 0 0 1 4.5 2h2A1.5 1.5 0 0 1 8 3.5v13A1.5 1.5 0 0 1 6.5 18h-2A1.5 1.5 0 0 1 3 16.5v-13z" fill="currentColor"></path></g></svg>""")

# Fluent Stop20Filled
icon_stop = NotStr(
    """<svg name="stop" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M4.5 3A1.5 1.5 0 0 0 3 4.5v11A1.5 1.5 0 0 0 4.5 17h11a1.5 1.5 0 0 0 1.5-1.5v-11A1.5 1.5 0 0 0 15.5 3h-11z" fill="currentColor"></path></g></svg>""")

# fluent PlayCircle20Regular
icon_play_circle = NotStr(
    """<svg name="play" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M9.125 7.184A.75.75 0 0 0 8 7.834v4.333a.75.75 0 0 0 1.125.65l4.125-2.384a.5.5 0 0 0 0-.866L9.125 7.184zM2 10a8 8 0 1 1 16 0a8 8 0 0 1-16 0zm8-7a7 7 0 1 0 0 14a7 7 0 0 0 0-14z" fill="currentColor"></path></g></svg>""")

# fluent PauseCircle20Regular
icon_pause_circle = NotStr(
    """<svg name="pause" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M9 7.5a.5.5 0 0 0-1 0v5a.5.5 0 0 0 1 0v-5zm3 0a.5.5 0 0 0-1 0v5a.5.5 0 0 0 1 0v-5zM10 2a8 8 0 1 0 0 16a8 8 0 0 0 0-16zm-7 8a7 7 0 1 1 14 0a7 7 0 0 1-14 0z" fill="currentColor"></path></g></svg>""")

# fluent RecordStop20Regular
icon_stop_circle = NotStr(
    """<svg name="stop" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M10 3a7 7 0 1 0 0 14a7 7 0 0 0 0-14zm-8 7a8 8 0 1 1 16 0a8 8 0 0 1-16 0zm5-2a1 1 0 0 1 1-1h4a1 1 0 0 1 1 1v4a1 1 0 0 1-1 1H8a1 1 0 0 1-1-1V8z" fill="currentColor"></path></g></svg>""")

# fluent ArrowClockwise20Regular
icon_refresh = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M3.066 9.05a7 7 0 0 1 12.557-3.22l.126.17H12.5a.5.5 0 1 0 0 1h4a.5.5 0 0 0 .5-.5V2.502a.5.5 0 0 0-1 0v2.207a8 8 0 1 0 1.986 4.775a.5.5 0 0 0-.998.064A7 7 0 1 1 3.066 9.05z" fill="currentColor"></path></g></svg>""")
||||
105
src/components/workflows/commands.py
Normal file
105
src/components/workflows/commands.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from components.BaseCommandManager import BaseCommandManager
|
||||
from components.workflows.constants import Routes, ROUTE_ROOT
|
||||
|
||||
|
||||
class WorkflowsCommandManager(BaseCommandManager):
    """Builds HTMX attribute dicts for the top-level workflows component.

    Each method returns a dict meant to be splatted onto a FastHTML element;
    the keys are htmx attributes and every request carries this component's
    ``_id`` so the server can route it back to the right instance.
    """

    def __init__(self, owner):
        super().__init__(owner)

    def request_add_workflow(self):
        # GET the "add workflow" form; the result replaces the whole tabs area.
        return {
            "hx-get": f"{ROUTE_ROOT}{Routes.AddWorkflow}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
            "hx-vals": f'{{"_id": "{self._id}"}}',
        }

    def add_workflow(self, tab_id: str):
        # POST the new workflow; js: hx-vals evaluates getTabContentBoundaries()
        # client-side so the server knows the available tab area.
        return {
            "hx-post": f"{ROUTE_ROOT}{Routes.AddWorkflow}",
            "hx-target": f"#w_{self._id}",
            "hx-vals": f'js:{{"_id": "{self._id}", "tab_id": "{tab_id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
        }

    def show_workflow(self, workflow_name):
        # NOTE(review): key is "hx_post" (underscore) here while add_workflow
        # uses "hx-post" — presumably both spellings are normalized by the
        # consumer; confirm and unify the convention.
        return {
            "hx_post": f"{ROUTE_ROOT}{Routes.ShowWorkflow}",
            "hx-target": f"#{self._owner.tabs_manager.get_id()}",
            "hx-swap": "outerHTML",
            "hx-vals": f'js:{{"_id": "{self._id}", "name": "{workflow_name}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
        }
||||
|
||||
|
||||
class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
def __init__(self, owner):
|
||||
super().__init__(owner)
|
||||
|
||||
def select_processor(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.SelectProcessor}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-trigger": "change",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
}
|
||||
|
||||
def save_properties(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.SaveProperties}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
}
|
||||
|
||||
def cancel_properties(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.CancelProperties}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
}
|
||||
|
||||
def on_processor_details_event(self, component_id: str, event_name: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.OnProcessorDetailsEvent}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-trigger": "change",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}", "event_name": "{event_name}"}}',
|
||||
}
|
||||
|
||||
def play_workflow(self):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.PlayWorkflow}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "tab_boundaries": getTabContentBoundaries("{self._owner.tabs_manager.get_id()}")}}',
|
||||
}
|
||||
|
||||
def pause_workflow(self):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.PauseWorkflow}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
def stop_workflow(self):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.StopWorkflow}",
|
||||
"hx-target": f"#{self._owner.tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
def refresh(self):
    """HTMX attributes for the refresh action.

    Uses hx-swap "none": the server responds with out-of-band swaps only.
    """
    attrs = {"hx_post": f"{ROUTE_ROOT}{Routes.Refresh}"}
    attrs["hx-swap"] = "none"
    attrs["hx-vals"] = f'js:{{"_id": "{self._id}"}}'
    return attrs
|
||||
|
||||
|
||||
class WorkflowPlayerCommandManager(BaseCommandManager):
    """Command manager bound to a WorkflowPlayer component."""

    def __init__(self, owner):
        # owner: the WorkflowPlayer this manager builds HTMX commands for.
        super().__init__(owner)
|
||||
674
src/components/workflows/components/WorkflowDesigner.py
Normal file
674
src/components/workflows/components/WorkflowDesigner.py
Normal file
@@ -0,0 +1,674 @@
|
||||
import logging
|
||||
|
||||
from fastcore.basics import NotStr
|
||||
from fasthtml.components import *
|
||||
from fasthtml.xtend import Script
|
||||
|
||||
from assets.icons import icon_error
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from components.workflows.assets.icons import icon_play, icon_pause, icon_stop, icon_refresh
|
||||
from components.workflows.commands import WorkflowDesignerCommandManager
|
||||
from components.workflows.components.WorkflowDesignerProperties import WorkflowDesignerProperties
|
||||
from components.workflows.components.WorkflowPlayer import WorkflowPlayer
|
||||
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes, COMPONENT_TYPES, \
|
||||
PROCESSOR_TYPES
|
||||
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, \
|
||||
Connection, WorkflowsDesignerDbManager, ComponentState, WorkflowsDesignerState
|
||||
from components_helpers import apply_boundaries, mk_tooltip, mk_dialog_buttons, mk_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
|
||||
from core.utils import get_unique_id, make_safe_id
|
||||
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
|
||||
from utils.DbManagementHelper import DbManagementHelper
|
||||
|
||||
logger = logging.getLogger("WorkflowDesigner")
|
||||
|
||||
|
||||
class WorkflowDesigner(BaseComponent):
    """Visual workflow editor: toolbox, drag-and-drop canvas, connections
    between components and a resizable properties panel.

    State is persisted through WorkflowsDesignerDbManager under ``key``;
    every mutating action also records an undo/redo entry.
    """

    def __init__(self, session,
                 _id=None,
                 settings_manager=None,
                 tabs_manager=None,
                 key: str = None,
                 designer_settings: WorkflowsDesignerSettings = None,
                 boundaries: dict = None):
        super().__init__(session, _id)
        self._settings_manager = settings_manager
        self.tabs_manager = tabs_manager
        self._key = key  # persistence key for this designer's state
        self._designer_settings = designer_settings
        self._db = WorkflowsDesignerDbManager(session, settings_manager)
        self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)
        self._state: WorkflowsDesignerState = self._db.load_state(key)
        self._boundaries = boundaries  # pixel boundaries of the hosting tab
        self.commands = WorkflowDesignerCommandManager(self)
        self.properties = WorkflowDesignerProperties(self._session, f"{self._id}", self)

        # One player per workflow name, shared through the InstanceManager.
        workflow_name = self._designer_settings.workflow_name
        self._player = InstanceManager.get(self._session,
                                           WorkflowPlayer.create_component_id(self._session, workflow_name),
                                           WorkflowPlayer,
                                           settings_manager=self._settings_manager,
                                           tabs_manager=self.tabs_manager,
                                           designer=self,
                                           boundaries=boundaries)

        # Last global error raised by a play attempt, shown next to the media bar.
        self._error_message = None
|
||||
|
||||
def set_boundaries(self, boundaries: dict):
    """Store the pixel boundaries of the hosting container."""
    self._boundaries = boundaries

def get_boundaries(self):
    """Return the last known pixel boundaries (may be None)."""
    return self._boundaries

def get_state(self) -> WorkflowsDesignerState:
    """Return the in-memory designer state (components, connections, layout)."""
    return self._state

def get_db(self):
    """Return the designer's persistence manager."""
    return self._db

def get_key(self):
    """Return the persistence key of this designer's state."""
    return self._key
|
||||
|
||||
def refresh_designer(self, oob=False):
    """Re-render the canvas; out-of-band (full canvas) when ``oob`` is set,
    otherwise only the canvas children."""
    if oob:
        return self._mk_canvas(oob)
    else:
        return self._mk_elements()

def refresh_properties(self, oob=False):
    """Re-render the properties panel."""
    return self._mk_properties(oob)

def refresh(self):
    """Re-render the whole designer as an out-of-band swap."""
    return self.__ft__(oob=True)
|
||||
|
||||
def refresh_state(self):
    """Reload state from the database and re-render everything.

    Used as the ``on_undo`` callback of every undo/redo entry so that an
    undo restores both state and UI.
    """
    self._state = self._db.load_state(self._key)
    self.properties.update_layout()
    self.properties.update_component(self._state.selected_component_id)
    return self.__ft__(oob=True)
|
||||
|
||||
def add_component(self, component_type, x, y):
    """Create a new component of ``component_type`` at canvas position (x, y).

    Persists the state with an undo entry; returns the refreshed canvas and
    the refreshed undo/redo widget.
    """
    self._state.component_counter += 1

    # The counter only grows, so ids stay unique even after deletions.
    component_id = f"comp_{self._state.component_counter}"
    info = COMPONENT_TYPES[component_type]

    component = WorkflowComponent(
        id=component_id,
        type=component_type,
        x=int(x),
        y=int(y),
        title=info["title"],
        description=info["description"],
        # Default to the first processor registered for this component type.
        properties={"processor_name": PROCESSOR_TYPES[component_type][0]}
    )
    self._state.components[component_id] = component

    undo_redo_attrs = UndoRedoAttrs(f"Add Component '{component_type}'", on_undo=self.refresh_state)
    self._db.save_state(self._key, self._state, undo_redo_attrs)  # update db

    return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def move_component(self, component_id, x, y):
    """Move a component to (x, y); moving a component also selects it."""
    if component_id in self._state.components:
        component = self._state.components[component_id]
        self._state.selected_component_id = component_id
        component.x = int(x)
        component.y = int(y)

        undo_redo_attrs = UndoRedoAttrs(f"Move Component '{component.title}'", on_undo=self.refresh_state)
        self._db.save_state(self._key, self._state, undo_redo_attrs)  # update db

    return self.refresh_designer(), self.properties.refresh(mode="form", oob=True), self._undo_redo.refresh()
|
||||
|
||||
def delete_component(self, component_id):
    """Delete a component and every connection attached to it."""
    # Remove component
    if component_id in self._state.components:
        component = self._state.components[component_id]
        del self._state.components[component_id]

        # Remove related connections
        self._state.connections = [connection for connection in self._state.connections
                                   if connection.from_id != component_id and connection.to_id != component_id]

        # update db
        undo_redo_attrs = UndoRedoAttrs(f"Remove Component '{component.title}'", on_undo=self.refresh_state)
        self._db.save_state(self._key, self._state, undo_redo_attrs)

    return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def add_connection(self, from_id, to_id):
    """Connect two components; duplicate connections are silently ignored."""
    # Check if connection already exists
    for connection in self._state.connections:
        if connection.from_id == from_id and connection.to_id == to_id:
            return self.refresh_designer()  # , self.error_message("Connection already exists")

    # NOTE(review): the id derives from the current list length, so after a
    # deletion a new connection can reuse an existing id — confirm connection
    # ids need not be unique.
    connection_id = f"conn_{len(self._state.connections) + 1}"
    connection = Connection(id=connection_id, from_id=from_id, to_id=to_id)
    self._state.connections.append(connection)

    # update db
    undo_redo_attrs = UndoRedoAttrs(f"Add Connection", on_undo=self.refresh_state)
    self._db.save_state(self._key, self._state, undo_redo_attrs)

    return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def delete_connection(self, from_id, to_id):
    """Remove the connection(s) going from ``from_id`` to ``to_id``.

    Rebuilds the list instead of calling ``list.remove`` while iterating
    the same list (the original pattern skips the element that follows a
    removal).

    :return: refreshed canvas and refreshed undo/redo widget.
    """
    self._state.connections = [
        connection for connection in self._state.connections
        if not (connection.from_id == from_id and connection.to_id == to_id)
    ]

    # update db
    undo_redo_attrs = UndoRedoAttrs("Delete Connection", on_undo=self.refresh_state)
    self._db.save_state(self._key, self._state, undo_redo_attrs)

    return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def set_designer_height(self, height):
    """Persist a new canvas height and re-render the whole designer.

    Passes ``self.refresh_state`` directly as the undo callback, matching
    the convention of the other mutators (the original wrapped it in a
    redundant lambda).
    """
    self._state.designer_height = height

    undo_redo_attrs = UndoRedoAttrs("Resize Designer", on_undo=self.refresh_state)
    self._db.save_state(self._key, self._state, undo_redo_attrs)
    return self.__ft__(), self._undo_redo.refresh()  # refresh the whole component
|
||||
|
||||
def update_properties_layout(self, input_width, properties_width, output_width):
    """Persist new widths for the three properties-panel columns and re-render.

    Passes ``self.refresh_state`` directly as the undo callback, matching
    the convention of the other mutators (the original wrapped it in a
    redundant lambda).
    """
    self._state.properties_input_width = input_width
    self._state.properties_properties_width = properties_width
    self._state.properties_output_width = output_width
    self.properties.update_layout()

    undo_redo_attrs = UndoRedoAttrs("Resize Properties", on_undo=self.refresh_state)
    self._db.save_state(self._key, self._state, undo_redo_attrs)
    return self.__ft__(), self._undo_redo.refresh()  # refresh the whole component
|
||||
|
||||
def select_component(self, component_id):
    """Mark ``component_id`` as selected and refresh the properties form."""
    if component_id in self._state.components:
        self._state.selected_component_id = component_id

        component = self._state.components[component_id]
        undo_redo_attrs = UndoRedoAttrs(f"Select Component {component.title}", on_undo=self.refresh_state)
        self._db.save_state(self._key, self._state, undo_redo_attrs)

    return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def save_properties(self, component_id: str, details: dict):
    """Merge the submitted form ``details`` into the component's properties."""
    if component_id in self._state.components:
        component = self._state.components[component_id]
        # dict merge: incoming keys override previously stored ones.
        component.properties |= details

        undo_redo_attrs = UndoRedoAttrs(f"Set properties for {component.title}", on_undo=self.refresh_state)
        self._db.save_state(self._key, self._state, undo_redo_attrs)
        logger.debug(f"Saved properties for component {component_id}: {details}")

    return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def cancel_properties(self, component_id: str):
    """Discard unsaved edits by re-rendering the form from stored state."""
    if component_id in self._state.components:
        logger.debug(f"Cancel saving properties for component {component_id}")

    return self.properties.refresh(mode="form")
|
||||
|
||||
def set_selected_processor(self, component_id: str, processor_name: str):
    """Switch the processor of a component.

    NOTE(review): this replaces the whole properties dict, discarding any
    settings saved for the previous processor — confirm the reset is
    intentional.
    """
    if component_id in self._state.components:
        component = self._state.components[component_id]
        component.properties = {"processor_name": processor_name}

        undo_redo_attrs = UndoRedoAttrs(f"Set Processor for {component.title}", on_undo=self.refresh_state)
        self._db.save_state(self._key, self._state, undo_redo_attrs)

    return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def play_workflow(self, boundaries: dict):
    """Run the workflow; on success open the player in a new tab.

    On failure the player's global error is kept for display next to the
    media bar in the current tab.
    """
    self._error_message = None

    self._player.run()
    if self._player.global_error:
        # Show the error message in the same tab
        self._error_message = self._player.global_error

    else:

        # change the tab and display the results
        self._player.set_boundaries(boundaries)
        self.tabs_manager.add_tab(f"Workflow {self._designer_settings.workflow_name}", self._player, self._player.key)

    return self.tabs_manager.refresh()
|
||||
|
||||
def stop_workflow(self):
    """Stop the running player and refresh the tabs container."""
    self._error_message = None
    self._player.stop()
    return self.tabs_manager.refresh()
|
||||
|
||||
def on_processor_details_event(self, component_id: str, event_name: str, details: dict):
    """React to change events fired from the processor-details panels."""
    if component_id in self._state.components:
        component = self._state.components[component_id]
        if event_name == "OnRepositoryChanged":
            # Switching repository resets the table to the first available one.
            component.properties["repository"] = details["repository"]
            tables = DbManagementHelper.list_tables(self._session, details["repository"])
            component.properties["table"] = tables[0] if len(tables) > 0 else None
        elif event_name == "OnJiraRequestTypeChanged":
            component.properties["request_type"] = details["request_type"]

    return self.properties.refresh(mode="form")
|
||||
|
||||
def get_workflow_name(self):
    """Return the workflow's display name."""
    return self._designer_settings.workflow_name

def get_workflow_components(self):
    """Return a live view over the workflow's components."""
    return self._state.components.values()

def get_workflow_connections(self):
    """Return the list of connections between components."""
    return self._state.connections
|
||||
|
||||
def __ft__(self, oob=False):
    """Render the full designer: title, media bar, canvas, splitter and
    properties panel, plus the client-side binding script."""
    return Div(
        H1(f"{self._designer_settings.workflow_name}", cls="text-xl font-bold"),
        # P("Drag components from the toolbox to the canvas to create your workflow.", cls="text-sm"),
        Div(
            self._mk_media(),
            # self._mk_refresh_button(),
            self._mk_error_message(),
            cls="flex mb-2",
            id=f"t_{self._id}"
        ),
        self._mk_designer(),
        # Horizontal splitter between the canvas and the properties panel.
        Div(cls="wkf-splitter", id=f"s_{self._id}"),
        self._mk_properties(),
        Script(f"bindWorkflowDesigner('{self._id}');"),
        **apply_boundaries(self._boundaries),
        id=f"{self._id}",
        hx_swap_oob='true' if oob else None,
    )
|
||||
|
||||
def _mk_connection_svg(self, conn: Connection):
    """Build the raw SVG markup for one connection as a cubic Bezier curve.

    Returns an empty string when either endpoint component no longer exists.
    """
    if conn.from_id not in self._state.components or conn.to_id not in self._state.components:
        return ""

    from_comp = self._state.components[conn.from_id]
    to_comp = self._state.components[conn.to_id]

    # Calculate connection points (approximate)
    x1 = from_comp.x + 128  # component width + output point
    y1 = from_comp.y + 32  # component height / 2
    x2 = to_comp.x
    y2 = to_comp.y + 32

    # Create curved path
    mid_x = (x1 + x2) / 2
    path = f"M {x1} {y1} C {mid_x} {y1}, {mid_x} {y2}, {x2} {y2}"

    return f"""
    <svg class="wkf-connection-line" style="left: 0; top: 0; width: 100%; height: 100%;"
         data-from-id="{conn.from_id}" data-to-id="{conn.to_id}">
        <path d="{path}" class="wkf-connection-path-thick"/>
        <path d="{path}" class="wkf-connection-path" marker-end="url(#arrowhead)"/>

        <defs>
            <marker id="arrowhead" markerWidth="10" markerHeight="7" refX="9" refY="3.5" orient="auto">
                <polygon points="0 0, 10 3.5, 0 7" class="wkf-connection-path-arrowhead"/>
            </marker>
        </defs>
    </svg>
    """
|
||||
|
||||
def _mk_component(self, component: WorkflowComponent):
    """Render one canvas component with its input/output connection points.

    The visual state reflects the player's runtime state: FAILURE shows a
    tooltip with the error message, NOT_RUN is greyed out.
    """
    runtime_state = self._player.get_component_runtime_state(component.id)

    info = COMPONENT_TYPES[component.type]
    is_selected = self._state.selected_component_id == component.id
    tooltip_content = None
    tooltip_class = ""

    if runtime_state.state == ComponentState.FAILURE:
        state_class = 'error'  # To be styled with a red highlight
        tooltip_content = runtime_state.error_message
        tooltip_class = "mmt-tooltip"
    elif runtime_state.state == ComponentState.NOT_RUN:
        state_class = 'not-run'  # To be styled as greyed-out
    else:
        state_class = ''

    return Div(
        # Input connection point
        Div(cls="wkf-connection-point wkf-input-point",
            data_component_id=component.id,
            data_point_type="input"),

        # Component content
        Div(
            Span(info["icon"], cls="text-xl mb-1"),
            H4(component.title, cls="font-semibold text-xs"),
            cls=f"wkf-component-content {info['color']} {state_class}"
        ),

        # Output connection point
        Div(cls="wkf-connection-point wkf-output-point",
            data_component_id=component.id,
            data_point_type="output"),

        cls=f"wkf-workflow-component w-32 {'selected' if is_selected else ''} {tooltip_class}",
        style=f"left: {component.x}px; top: {component.y}px;",
        data_component_id=component.id,
        data_tooltip=tooltip_content,
        draggable="true"
    )
|
||||
|
||||
def _mk_elements(self):
    """Render the canvas children: a hint when empty, otherwise the
    connection SVGs followed by the components."""
    if len(self._state.components) == 0:
        return Div("Drag components from the toolbox to the canvas to create your workflow.",
                   cls="flex items-center justify-center h-full w-full"
                   )

    return Div(
        # Render connections
        *[NotStr(self._mk_connection_svg(conn)) for conn in self._state.connections],

        # Render components
        *[self._mk_component(comp) for comp in self._state.components.values()],
    )
|
||||
|
||||
def _mk_canvas(self, oob=False):
    """Render the drop canvas holding components and connections.

    Fixes two slips in the original: a stray trailing comma made the method
    return a one-element tuple instead of the Div itself (FastHTML flattens
    tuples, but callers should receive the element), and ``flex-1`` appeared
    twice in the class list.
    """
    return Div(
        self._mk_elements(),
        cls=f"wkf-canvas flex-1 rounded-lg border {'wkf-canvas-error' if self._error_message else ''}",
        id=f"c_{self._id}",
        hx_swap_oob='true' if oob else None,
    )
|
||||
|
||||
def _mk_toolbox(self):
    """Render the draggable component palette (left side of the designer)."""
    return Div(
        Div(
            *[self._mk_toolbox_item(comp_type, info)
              for comp_type, info in COMPONENT_TYPES.items()],
            # cls="space-y-1"
        ),
        cls="wkf-toolbox"
    )
|
||||
|
||||
def _mk_designer(self):
    """Render toolbox and canvas side by side at the persisted height."""
    return Div(
        self._mk_toolbox(),  # (Left side)
        self._mk_canvas(),  # (Right side)

        cls="wkf-designer flex gap-1",
        id=f"d_{self._id}",
        style=f"height:{self._state.designer_height}px;"
    )
|
||||
|
||||
def _mk_media(self):
    """Render the play / pause / stop media bar."""
    return Div(
        mk_icon(icon_play, cls="mr-1", **self.commands.play_workflow()),
        mk_icon(icon_pause, cls="mr-1", **self.commands.pause_workflow()),
        mk_icon(icon_stop, cls="mr-1", **self.commands.stop_workflow()),
        cls=f"media-controls flex m-2"
    )
|
||||
|
||||
def _mk_refresh_button(self):
    """Render the refresh icon button (currently unused, see __ft__)."""
    return mk_icon(icon_refresh, **self.commands.refresh())
|
||||
|
||||
def _mk_error_message(self):
    """Render the last play error as an alert, or an empty Div when none.

    NOTE(review): hx_swap_oob is set unconditionally on the alert — confirm
    this is wanted when the element is rendered in-band by __ft__.
    """
    if not self._error_message:
        return Div()

    return Div(
        mk_icon(icon_error),
        Span(self._error_message, cls="text-sm"),
        role="alert",
        cls="alert alert-error alert-outline p-1!",
        hx_swap_oob='true',
    )
|
||||
|
||||
def _mk_processor_properties(self, component, processor_name):
    """Dispatch to the details renderer matching the processor name
    (and, for Default processors, the component type)."""
    if processor_name == "Jira":
        return self._mk_jira_processor_details(component)
    elif processor_name == "Repository":
        return self._mk_repository_processor_details(component)
    elif component.type == ProcessorTypes.Filter and processor_name == "Default":
        return self._mk_filter_processor_details(component)
    elif component.type == ProcessorTypes.Presenter and processor_name == "Default":
        return self._mk_presenter_processor_details(component)

    # Fallback for type/processor pairs with no dedicated form yet.
    return Div('Not defined yet !')
|
||||
|
||||
def _mk_properties_output(self, component):
    """Render the 'Output' tab: the name under which this component's
    result is published to downstream components."""
    return Div(
        "Output name",
        # NOTE(review): type="input" is not a valid HTML input type (browsers
        # fall back to text) — confirm "text" was intended.
        Input(type="input",
              name="output_name",
              placeholder="data",
              value=component.properties.get("output_name", None),
              cls="input w-xs"),
        cls="join"
    )
|
||||
|
||||
def _mk_properties_details(self, component_id, allow_component_selection=False):
    """Render a tabbed properties form (Properties / Inputs / Output) for a
    component.

    NOTE(review): the guard below parses as
    ``A or (B and not C)`` because ``and`` binds tighter than ``or`` —
    confirm ``(A or B) and not C`` was not intended. Also note the passed
    ``component_id`` is overridden with the selected one in the else branch,
    so the parameter only matters for the guard.
    """
    def _mk_header():
        # Coloured type badge plus the component id.
        return Div(
            Div(
                Span(icon),
                H4(component.title, cls="font-semibold text-xs"),
                cls=f"rounded-lg border-2 {color} flex text-center px-2"
            ),
            H1(component_id, cls="ml-4"),
            cls="flex mb-2"
        )

    def _mk_select():
        # Processor drop-down restricted to this component type's processors.
        return Select(
            *[Option(processor_name, selected="selected" if processor_name == selected_processor_name else None)
              for processor_name in PROCESSOR_TYPES[component.type]],
            cls="select select-sm w-64 mb-2",
            id="processor_name",
            name="processor_name",
            **self.commands.select_processor(component_id)
        )

    if component_id is None or component_id not in self._state.components and not allow_component_selection:
        return None
    else:
        component_id = self._state.selected_component_id
        component = self._state.components[component_id]
        selected_processor_name = component.properties["processor_name"]
        icon = COMPONENT_TYPES[component.type]["icon"]
        color = COMPONENT_TYPES[component.type]["color"]
        return Div(
            Form(
                _mk_header(),
                Div(
                    Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Properties", checked="checked"),
                    Div(
                        _mk_select(),
                        self._mk_processor_properties(component, selected_processor_name),
                        cls="tab-content"
                    ),

                    Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Inputs"),
                    Div(
                        "Inputs",
                        cls="tab-content"
                    ),

                    Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Output"),
                    Div(
                        self._mk_properties_output(component),
                        cls="tab-content"
                    ),

                    cls="tabs tabs-border"
                ),
                mk_dialog_buttons(cls="mt-4",
                                  on_ok=self.commands.save_properties(component_id),
                                  on_cancel=self.commands.cancel_properties(component_id)),

                cls="font-mono text-sm",
                id=f"f_{self._id}_{component_id}",
            ),
            Script(f"bindFormData('f_{self._id}_{component_id}');")
        )
|
||||
|
||||
def _mk_properties(self, oob=False):
    """Return the properties panel component; rendering is delegated to it.

    NOTE(review): ``oob`` is currently ignored here — confirm it should be
    forwarded to the properties component.
    """
    return self.properties
|
||||
|
||||
def _mk_jira_processor_details(self, component):
    """Render the Jira processor form: request-type selector, an optional
    fields input (Search only) and the request input group.

    NOTE(review): ``_mk_input_group`` implicitly returns None for request
    types other than Search and Comments — confirm that is intentional.
    """
    def _mk_option(name):
        # One option per JiraRequestTypes member, preselecting the stored one.
        return Option(name.name,
                      value=name.value,
                      selected="selected" if name.value == request_type else None)

    def _mk_input_group():
        if request_type == JiraRequestTypes.Search.value:
            return Div(
                Input(type="text",
                      name="request",
                      value=component.properties.get("request", ""),
                      placeholder="Enter JQL",
                      cls="input w-full"),
                P("Write your jql code"),
            )
        elif request_type == JiraRequestTypes.Comments.value:
            return Div(
                Input(type="text",
                      name="request",
                      value=component.properties.get("request", ""),
                      placeholder="Issue id",
                      cls="input w-full"),
                P("Put the issue id here"),
            )

    def _mk_extra_parameters():
        # Search requests additionally let the user restrict the Jira fields.
        if request_type == JiraRequestTypes.Search.value:
            return Input(type="text",
                         name="fields",
                         value=component.properties.get("fields", DEFAULT_SEARCH_FIELDS),
                         placeholder="default fields",
                         cls="input w-full ml-2")
        else:
            return None

    request_type = component.properties.get("request_type", JiraRequestTypes.Search.value)
    return Div(
        Fieldset(
            Legend("JQL", cls="fieldset-legend"),
            Div(
                Select(
                    *[_mk_option(enum) for enum in JiraRequestTypes],
                    cls="select w-xs",
                    name="request_type",
                    **self.commands.on_processor_details_event(component.id, "OnJiraRequestTypeChanged"),
                ),
                _mk_extra_parameters(),
                cls="flex"),
            _mk_input_group(),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        ),
    )
|
||||
|
||||
def _mk_repository_processor_details(self, component):
    """Render the Repository processor form: repository and table selectors.

    The repository and table lists are fetched once and reused; the original
    queried DbManagementHelper twice per list (once for the emptiness check
    and again inside the comprehension).
    """
    selected_repo = component.properties.get("repository", None)
    selected_table = component.properties.get("table", None)

    def _mk_repositories_options():
        repositories = DbManagementHelper.list_repositories(self._session)
        if len(repositories) == 0:
            return [Option("No repository available", disabled=True)]

        return ([Option("Choose a repository", disabled=True, selected="selected" if selected_repo is None else None)] +
                [Option(repo.name, selected="selected" if repo.name == selected_repo else None)
                 for repo in repositories])

    def _mk_tables_options():
        if selected_repo is None:
            return [Option("No repository selected", disabled=True)]

        tables = DbManagementHelper.list_tables(self._session, selected_repo)
        if len(tables) == 0:
            return [Option("No table available", disabled=True)]

        return ([Option("Choose a table", disabled=True, selected="selected" if selected_table is None else None)] +
                [Option(table, selected="selected" if table == selected_table else None)
                 for table in tables])

    return Div(
        Fieldset(
            Legend("Repository", cls="fieldset-legend"),
            Div(
                Select(
                    *_mk_repositories_options(),
                    cls="select w-64",
                    id=f"repository_{self._id}",
                    name="repository",
                    **self.commands.on_processor_details_event(component.id, "OnRepositoryChanged"),
                ),
                Select(
                    *_mk_tables_options(),
                    cls="select w-64 ml-4",
                    id=f"table_{self._id}",
                    name="table",
                ),
                cls="flex",
            ),
            P("Select the source table"),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
|
||||
|
||||
@staticmethod
def _mk_filter_processor_details(component):
    """Render the Default Filter processor form: one python filter expression."""
    return Div(
        Fieldset(
            Legend("Filter", cls="fieldset-legend"),
            Input(type="text",
                  name="filter",
                  value=component.properties.get("filter", ""),
                  placeholder="Enter filter expression",
                  cls="input w-full"),
            P("Write your filter expression (python syntax)"),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
|
||||
|
||||
@staticmethod
def _mk_presenter_processor_details(component):
    """Render the Default Presenter processor form: the columns selection."""
    return Div(
        Fieldset(
            Legend("Presenter", cls="fieldset-legend"),
            Input(type="text",
                  name="columns",
                  value=component.properties.get("columns", ""),
                  placeholder="Columns to display, separated by comma",
                  cls="input w-full"),
            P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
            P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
|
||||
|
||||
def _get_properties_height(self):
    """Height left for the properties panel.

    Tab height minus the canvas height minus 86px of fixed chrome (title,
    media bar, splitter). The original leaked a debug ``print``; it is now a
    debug log entry.
    """
    logger.debug("height: %s", self._boundaries["height"])
    return self._boundaries["height"] - self._state.designer_height - 86
|
||||
|
||||
@staticmethod
def create_component_id(session, suffix=None):
    """Build a safe, per-user designer instance id.

    When ``suffix`` is omitted a fresh unique id is generated, so every
    call yields a distinct component id.
    """
    if suffix is None:
        suffix = get_unique_id()
    raw = f"{WORKFLOW_DESIGNER_INSTANCE_ID}{session['user_id']}{suffix}"
    return make_safe_id(raw)
|
||||
|
||||
@staticmethod
def _mk_toolbox_item(component_type: str, info: dict):
    """Render one draggable toolbox entry with its tooltip."""
    return Div(
        mk_tooltip(
            Div(
                Span(info["icon"], cls="mb-2"),
                H4(info["title"], cls="font-semibold text-xs"),
                cls=f"p-2 rounded-lg border-2 {info['color']} flex text-center"
            ),
            tooltip=info["description"]),
        cls="wkf-toolbox-item p-2",
        draggable="true",
        # Read by the canvas drop handler to know which type to instantiate.
        data_type=component_type
    )
|
||||
@@ -0,0 +1,345 @@
|
||||
from fasthtml.common import *
|
||||
from dataclasses import dataclass
|
||||
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.workflows.constants import COMPONENT_TYPES, PROCESSOR_TYPES
|
||||
from components_helpers import mk_dialog_buttons
|
||||
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
|
||||
from utils.DbManagementHelper import DbManagementHelper
|
||||
|
||||
|
||||
@dataclass
class DesignerLayout:
    """Pixel widths of the three columns of the properties panel."""
    # left column: incoming data preview
    input_width: int
    # middle column: the editable properties form
    properties_width: int
    # right column: output preview
    output_width: int
||||
|
||||
|
||||
class WorkflowDesignerProperties(BaseComponent):
    """Properties panel of the workflow designer: input preview, editable
    properties form and output preview in three resizable columns."""

    def __init__(self, session, instance_id, owner):
        super().__init__(session, instance_id)
        self._owner = owner  # the WorkflowDesigner this panel belongs to
        self._boundaries = self._owner.get_boundaries()
        self._commands = self._owner.commands
        self.layout = None  # DesignerLayout, set by update_layout()
        self._component = None  # currently selected component, or None
        self.update_layout()
        self.update_component(self._owner.get_state().selected_component_id)
|
||||
|
||||
def update_layout(self):
    """Compute the three column widths, defaulting to an even split.

    The designer state is fetched once into a local (``get_state`` is a
    plain accessor; the original called it up to four times).
    """
    state = self._owner.get_state()
    if state.properties_input_width is None:
        # No saved layout yet: split the available width in three.
        input_width = self._boundaries["width"] // 3
        properties_width = self._boundaries["width"] // 3
        output_width = self._boundaries["width"] - input_width - properties_width
    else:
        input_width = state.properties_input_width
        properties_width = state.properties_properties_width
        output_width = state.properties_output_width

    self.layout = DesignerLayout(
        input_width=input_width,
        properties_width=properties_width,
        output_width=output_width
    )
|
||||
|
||||
def update_component(self, component_id):
    """Cache the selected component, or None when the id is unknown."""
    components = self._owner.get_state().components
    if component_id is not None and component_id in components:
        self._component = components[component_id]
    else:
        self._component = None
|
||||
|
||||
def refresh(self, mode="all", oob=False):
    """Re-render; mode 'form' swaps only the content area, any other mode
    re-renders the whole panel."""
    self.update_component(self._owner.get_state().selected_component_id)
    if mode == "form":
        return self._mk_content(oob=oob)

    return self.__ft__(oob=oob)
|
||||
|
||||
def _mk_layout(self):
    """Render the input / properties / output columns side by side."""
    return Div(
        self._mk_input(),
        self._mk_properties(),
        self._mk_output(),
        cls="flex",
        style="height: 100%; width: 100%; flex: 1;"
    )
|
||||
|
||||
def _mk_input(self):
    """Render the left column (input preview) at its persisted width."""
    return Div(
        "Input",
        id=f"pi_{self._id}",
        style=f"width: {self.layout.input_width}px;",
        cls="wkf-properties-input"
    )
|
||||
|
||||
def _mk_output(self):
    """Render the right column (output preview) at its persisted width."""
    return Div(
        "Output",
        id=f"po_{self._id}",
        style=f"width: {self.layout.output_width}px;",
        cls="wkf-properties-output"
    )
|
||||
|
||||
def _mk_properties(self):
    """Render the middle column: resize handles around the editable content."""
    return Div(
        # Drag handle (20px height)
        Div(
            A(cls="wkf-properties-handle-top"),
            cls="wkf-properties-top",
            id=f"ppt_{self._id}",
        ),

        # Properties content
        self._mk_content(),

        # Left resize handle
        Div(
            id=f"ppl_{self._id}",
            cls="wkf-properties-handle-left"
        ),

        # Right resize handle
        Div(
            id=f"ppr_{self._id}",
            cls="wkf-properties-handle-right"
        ),

        id=f"pp_{self._id}",
        style=f"width: {self.layout.properties_width}px; height: 100%;",
        cls="wkf-properties-properties flex flex-col",
    )
|
||||
|
||||
def _mk_content(self, oob=False):
    """Render the content area (header + form) of the selected component.

    Drops the stray ``f`` prefix the original had on the placeholder-free
    'true' literal (ruff F541); the rendered value is unchanged.
    """
    return Div(
        self._header(),
        self._form(),
        cls="wkf-properties-content",
        id=f"ppc_{self._id}",
        hx_swap_oob='true' if oob else None,
    )
|
||||
|
||||
def _header(self):
    """Render the coloured type badge and id of the selected component,
    or None when nothing is selected."""
    if self._component is None:
        return None

    icon = COMPONENT_TYPES[self._component.type]["icon"]
    color = COMPONENT_TYPES[self._component.type]["color"]
    return Div(
        Div(
            Span(icon),
            H4(self._component.title, cls="font-semibold text-xs"),
            cls=f"rounded-lg border-2 {color} flex text-center px-2"
        ),
        Div(self._component.id, cls="ml-2"),
        cls="flex wkf-properties-content-header",
    )
|
||||
|
||||
def _form(self):
    """Render the properties form: processor selector, processor details and
    OK/Cancel buttons; None when no component is selected."""
    if self._component is None:
        return None

    component_id = self._component.id
    return Form(
        Div(
            self._mk_select_processor(),
            self._content_details(),
            style="flex-grow: 1; overflow-y: auto;"
        ),
        mk_dialog_buttons(cls="pb-2",
                          on_ok=self._commands.save_properties(component_id),
                          on_cancel=self._commands.cancel_properties(component_id)
                          ),
        id=f"ppf_{self._id}",
        cls="wkf-properties-content-form",
    )
|
||||
|
||||
def _mk_select_processor(self):
    """Render the processor drop-down for the selected component's type."""
    selected_processor_name = self._component.properties.get("processor_name", None)
    return Select(
        *[Option(processor_name, selected="selected" if processor_name == selected_processor_name else None)
          for processor_name in PROCESSOR_TYPES[self._component.type]],
        cls="select select-sm m-2",
        id="processor_name",
        name="processor_name",
        **self._commands.select_processor(self._component.id)
    )
|
||||
|
||||
def _content_details(self):
    """Dispatch to the `_mk_details_<type>_<processor>` builder, or show a placeholder."""
    processor_name = self._component.properties.get("processor_name", None)
    key = f"_mk_details_{self._component.type}_{processor_name}".lower()
    builder = getattr(self, key, None)
    if builder is None:
        return Div(f"Component '{key}' not found")
    return builder()
||||
def _mk_details_producer_jira(self):
    """
    Build the details form for a 'Jira' data producer: a request-type selector
    plus the inputs relevant to the chosen request type (JQL search, issue id,
    project key, ...). Inputs are name-spaced per request type
    (``<request_type>_request`` / ``<request_type>_fields``) so each type keeps
    its own saved values.
    """
    def _mk_option(name):
        """
        Build one request-type Option from a JiraRequestTypes enum member.

        :param name: a JiraRequestTypes member (``.name`` is the label, ``.value`` the form value)
        :return: the Option, marked selected when it matches the current request_type
        """
        return Option(name.name,
                      value=name.value,
                      selected="selected" if name.value == request_type else None)

    def _mk_input_group():
        # Returns the list of input Divs for the current request_type (closure).
        if request_type == JiraRequestTypes.Search.value or request_type == "issues":  # remove issues at some point (legacy value)
            # search: configurable field list + JQL query
            return [
                Div(
                    Input(type="text",
                          name=f"{request_type}_fields",
                          value=self._component.properties.get(f"{request_type}_fields", DEFAULT_SEARCH_FIELDS),
                          placeholder="default fields",
                          cls="input w-full"),
                    P("Jira fields to retrieve"),
                ),
                Div(
                    Input(type="text",
                          name=f"{request_type}_request",
                          value=self._component.properties.get(f"{request_type}_request", ""),
                          placeholder="Enter JQL",
                          cls="input w-full"),
                    P("Write your jql code"),
                )
            ]
        elif request_type in (JiraRequestTypes.Issue.value, JiraRequestTypes.Comments.value):
            # single issue (or its comments): only an issue id is needed
            return [
                Div(
                    Input(type="text",
                          name=f"{request_type}_request",
                          value=self._component.properties.get(f"{request_type}_request", ""),
                          placeholder="Issue id",
                          cls="input w-full"),
                    P("Put the issue id here"),
                )
            ]
        elif request_type == JiraRequestTypes.Versions.value:
            # project versions: keyed by project
            return [
                Div(
                    Input(type="text",
                          name=f"{request_type}_request",
                          value=self._component.properties.get(f"{request_type}_request", ""),
                          placeholder="Project key",
                          cls="input w-full"),
                    P("Enter the project key"),
                )
            ]
        else:
            return [Div(f"** Not Implemented ** ('{request_type}' not supported yet)")]

    # default to a JQL search when the component has no saved request type
    request_type = self._component.properties.get("request_type", JiraRequestTypes.Search.value)
    return Div(
        Fieldset(
            Legend("Jira", cls="fieldset-legend"),
            Div(
                Select(
                    *[_mk_option(enum) for enum in JiraRequestTypes],
                    cls="select w-xs",
                    name="request_type",
                    # re-render the details when the request type changes
                    **self._commands.on_processor_details_event(self._component.id, "OnJiraRequestTypeChanged"),
                ),
                # NOTE(review): "ressource" typo is user-visible text — fix separately
                P("Jira ressource type"),
                cls="mb-4"
            ),
            *_mk_input_group(),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        ),
    )
||||
def _mk_details_producer_repository(self):
    """
    Build the details form for a 'Repository' data producer: a repository
    selector and a table selector that depends on the chosen repository.

    :return: the details Div
    """
    selected_repo = self._component.properties.get("repository", None)
    selected_table = self._component.properties.get("table", None)

    def _mk_repositories_options():
        # fetch once and reuse (previously queried the DB twice per render)
        repositories = DbManagementHelper.list_repositories(self._session)
        if len(repositories) == 0:
            return [Option("No repository available", disabled=True)]

        return ([Option("Choose a repository", disabled=True, selected="selected" if selected_repo is None else None)] +
                [Option(repo.name, selected="selected" if repo.name == selected_repo else None)
                 for repo in repositories])

    def _mk_tables_options():
        if selected_repo is None:
            return [Option("No repository selected", disabled=True, selected="selected")]

        # fetch once and reuse (previously queried the DB twice per render)
        tables = DbManagementHelper.list_tables(self._session, selected_repo)
        if len(tables) == 0:
            return [Option("No table available", disabled=True)]

        return ([Option("Choose a table", disabled=True, selected="selected" if selected_table is None else None)] +
                [Option(table, selected="selected" if table == selected_table else None)
                 for table in tables])

    return Div(
        Fieldset(
            Legend("Repository", cls="fieldset-legend"),
            Div(
                Select(
                    *_mk_repositories_options(),
                    cls="select w-64",
                    id=f"repository_{self._id}",
                    name="repository",
                    # re-render the table selector when the repository changes
                    **self._commands.on_processor_details_event(self._component.id, "OnRepositoryChanged"),
                ),
                P("Select the repository"),
            ),
            Div(
                Select(
                    *_mk_tables_options(),
                    cls="select w-64",
                    id=f"table_{self._id}",
                    name="table",
                ),
                P("Select the table"),
            ),

            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"

        )
    )
||||
def _mk_details_filter_default(self):
    """Details form for the default data filter: a single filter-expression input."""
    filter_value = self._component.properties.get("filter", "")
    filter_input = Input(type="text",
                         name="filter",
                         value=filter_value,
                         placeholder="Filter expression",
                         cls="input w-full")
    return Div(
        Fieldset(
            Legend("Filter", cls="fieldset-legend"),
            filter_input,
            P("Filter expression"),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
||||
def _mk_details_presenter_default(self):
    """Details form for the default presenter: the column-mapping expression."""
    columns_value = self._component.properties.get("columns", "")
    columns_input = Input(type="text",
                          name="columns",
                          value=columns_value,
                          placeholder="Columns to display, separated by comma",
                          cls="input w-full")
    return Div(
        Fieldset(
            Legend("Presenter", cls="fieldset-legend"),
            columns_input,
            P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
            P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
            cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
        )
    )
||||
def __ft__(self, oob=False):
    """FastHTML render hook: wrap the panel layout in its sized container."""
    swap_mode = 'innerHTML' if oob else None
    return Div(
        self._mk_layout(),
        style=f"height: {self._get_height()}px;",
        id=f"p_{self._id}",
        hx_swap_oob=swap_mode,
        cls="wkf-properties"
    )
||||
def _get_height(self):
    # Available panel height = tab height minus the designer area above it,
    # minus 86px of fixed chrome (presumably toolbar/handles — TODO confirm
    # against the CSS if the layout changes).
    return self._boundaries["height"] - self._owner.get_state().designer_height - 86
||||
233
src/components/workflows/components/WorkflowPlayer.py
Normal file
233
src/components/workflows/components/WorkflowPlayer.py
Normal file
@@ -0,0 +1,233 @@
|
||||
from collections import deque
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pandas as pd
|
||||
from fasthtml.components import *
|
||||
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.datagrid_new.components.DataGrid import DataGrid
|
||||
from components.datagrid_new.settings import DataGridSettings
|
||||
from components.workflows.commands import WorkflowPlayerCommandManager
|
||||
from components.workflows.constants import WORKFLOW_PLAYER_INSTANCE_ID, ProcessorTypes
|
||||
from components.workflows.db_management import WorkflowComponentRuntimeState, \
|
||||
WorkflowComponent, ComponentState
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.utils import get_unique_id, make_safe_id
|
||||
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||
from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataFilter, JiraDataProducer
|
||||
|
||||
# Shared DataGrid configuration for the player's result grid: keep the header
# and the global filter, hide view management and the file/settings actions.
grid_settings = DataGridSettings(
    header_visible=True,
    filter_all_visible=True,
    views_visible=False,
    open_file_visible=False,
    open_settings_visible=False)
||||
@dataclass
class WorkflowsPlayerError(Exception):
    """
    Raised when a workflow component fails to initialize or run.
    Wraps the original exception together with the offending component id so
    the player can mark that component's runtime state as FAILURE.
    """
    component_id: str   # id of the component that failed
    error: Exception    # underlying exception
||||
class WorkflowPlayer(BaseComponent):
    """
    Executes the workflow built in a WorkflowDesigner and shows the result in
    a DataGrid. Tracks a per-component runtime state (SUCCESS / FAILURE /
    NOT_RUN) plus a global error message for display.
    """

    def __init__(self, session,
                 _id=None,
                 settings_manager=None,
                 tabs_manager=None,
                 designer=None,
                 boundaries: dict = None):
        """
        :param session: current user session
        :param _id: component id (generated by the base class when None)
        :param settings_manager: settings store, passed to data producers
        :param tabs_manager: tabs manager (kept public for the command manager)
        :param designer: the WorkflowDesigner providing components/connections
        :param boundaries: pixel boundaries used to size the result grid
        """
        super().__init__(session, _id)
        self._settings_manager = settings_manager
        self.tabs_manager = tabs_manager
        self._designer = designer
        # one player instance per workflow name
        self.key = f"__WorkflowPlayer_{designer.get_workflow_name()}"
        self._boundaries = boundaries
        self.commands = WorkflowPlayerCommandManager(self)
        # result grid, shared through the InstanceManager under self.key
        self._datagrid = InstanceManager.get(self._session,
                                             DataGrid.create_component_id(session),
                                             DataGrid,
                                             key=self.key,
                                             grid_settings=grid_settings,
                                             boundaries=boundaries)
        self.runtime_states = {}    # component_id -> WorkflowComponentRuntimeState
        self.global_error = None    # last global error message (str) or None
        self.has_error = False

    def set_boundaries(self, boundaries: dict):
        # NOTE(review): self._boundaries is not updated here — confirm the
        # grid is the only consumer of resize events.
        self._datagrid.set_boundaries(boundaries)

    def get_component_runtime_state(self, component_id: str):
        # return a default value if the player hasn't been played yet
        return self.runtime_states.get(component_id, WorkflowComponentRuntimeState(component_id))

    def run(self):
        """
        Build the engine from the designer's graph, execute it, update the
        per-component runtime states and load the result into the grid.
        """
        # at least one connection is required to play
        if len(self._designer.get_workflow_connections()) == 0:
            # NOTE(review): has_error stays False on this path, unlike every
            # other error path below — confirm this is intentional.
            self.global_error = "No connections defined."
            return

        # start from a clean slate: everything NOT_RUN until proven otherwise
        self._init_state(ComponentState.NOT_RUN)

        try:
            sorted_components = self._get_sorted_components()
            engine = self._get_engine(sorted_components)

        except ValueError as e:
            # Handle workflow structure errors (e.g., cycles)
            self.has_error = True
            self.global_error = f"Workflow configuration error: {e}"
            return
        except WorkflowsPlayerError as ex:
            # a specific component failed to initialize: mark it as FAILURE
            self.has_error = True
            self.global_error = self._get_global_error_as_str(ex, "Failed to init ")
            if ex.component_id in self.runtime_states:
                self.runtime_states[ex.component_id].state = ComponentState.FAILURE
                self.runtime_states[ex.component_id].error_message = str(ex.error)
            return

        res = engine.run_to_list()

        # global failure with no per-component attribution
        if engine.has_error and not engine.errors:
            self.has_error = True
            self.global_error = engine.global_error

        else:  # loop through the components and update the runtime states
            for component in sorted_components:
                runtime_state = self.runtime_states.get(component.id)

                if component.id not in engine.errors:
                    runtime_state.state = ComponentState.SUCCESS
                    continue

                # the component failed
                error = engine.errors[component.id]
                runtime_state.state = ComponentState.FAILURE
                runtime_state.error_message = str(error)
                self.global_error = self._get_global_error_as_str(error, "Error in ")  # update global error as well
                self.has_error = True
                break  # the remaining components will remain as NOT_RUN

        # the grid is refreshed even on failure, with whatever rows were produced
        data = [row.as_dict() for row in res]
        df = pd.DataFrame(data)
        self._datagrid.init_from_dataframe(df)

    def stop(self):
        # reset all runtime states back to SUCCESS and clear errors
        self._init_state()

    def get_dataframe(self):
        """Return the DataFrame currently loaded in the result grid."""
        return self._datagrid.get_dataframe()

    def __ft__(self):
        # the player renders as its result grid
        return Div(
            self._datagrid,
            id=self._id,
        )

    def _get_sorted_components(self) -> list[WorkflowComponent]:
        """
        Sorts the workflow components based on their connections using topological sort.

        - A connection from component A to B means A must come before B.
        - Raises a ValueError if a cycle is detected.
        - Raises a ValueError if a connection references a non-existent component.
        - Ignores components that are not part of any connection.

        :return: A list of sorted WorkflowComponent objects.
        """
        components_by_id = {c.id: c for c in self._designer.get_workflow_components()}

        # Get all component IDs involved in connections
        involved_ids = set()
        for conn in self._designer.get_workflow_connections():
            involved_ids.add(conn.from_id)
            involved_ids.add(conn.to_id)

        # Check if all involved components exist
        for component_id in involved_ids:
            if component_id not in components_by_id:
                raise ValueError(f"Component with ID '{component_id}' referenced in connections but does not exist.")

        # Build the graph (adjacency list and in-degrees) for involved components
        adj = {cid: [] for cid in involved_ids}
        in_degree = {cid: 0 for cid in involved_ids}

        for conn in self._designer.get_workflow_connections():
            # from_id -> to_id
            adj[conn.from_id].append(conn.to_id)
            in_degree[conn.to_id] += 1

        # Find all sources (nodes with in-degree 0)
        queue = deque([cid for cid in involved_ids if in_degree[cid] == 0])

        # Kahn's algorithm: repeatedly emit a source and relax its successors
        sorted_order = []
        while queue:
            u = queue.popleft()
            sorted_order.append(u)

            for v in adj.get(u, []):
                in_degree[v] -= 1
                if in_degree[v] == 0:
                    queue.append(v)

        # Check for cycles
        if len(sorted_order) != len(involved_ids):
            raise ValueError("A cycle was detected in the workflow connections.")

        # Return sorted components
        return [components_by_id[cid] for cid in sorted_order]

    def _get_engine(self, sorted_components):
        """
        Build a WorkflowEngine from the (already topologically sorted)
        components, mapping each (type, processor_name) pair to its processor.

        :raises WorkflowsPlayerError: wrapping any failure, tagged with the component id
        """
        engine = WorkflowEngine()
        for component in sorted_components:
            key = (component.type, component.properties["processor_name"])
            try:
                if key == (ProcessorTypes.Producer, "Repository"):
                    engine.add_processor(
                        TableDataProducer(self._session,
                                          self._settings_manager,
                                          component.id,
                                          component.properties["repository"],
                                          component.properties["table"]))
                elif key == (ProcessorTypes.Producer, "Jira"):
                    # the request/fields properties are name-spaced per request type
                    request_type = component.properties["request_type"]
                    engine.add_processor(
                        JiraDataProducer(self._session,
                                         self._settings_manager,
                                         component.id,
                                         component.properties["request_type"],
                                         component.properties[f"{request_type}_request"],
                                         component.properties.get(f"{request_type}_fields", None)))
                elif key == (ProcessorTypes.Filter, "Default"):
                    engine.add_processor(DefaultDataFilter(component.id, component.properties["filter"]))
                elif key == (ProcessorTypes.Presenter, "Default"):
                    engine.add_processor(DefaultDataPresenter(component.id, component.properties["columns"]))
                else:
                    raise ValueError(
                        f"Unsupported processor : type={component.type}, name={component.properties['processor_name']}")
            except Exception as e:
                # tag the failure with the component so run() can attribute it
                raise WorkflowsPlayerError(component.id, e)

        return engine

    def _init_state(self, state: ComponentState = ComponentState.SUCCESS):
        # reset global error flags and give every designer component the same state
        self.global_error = None
        self.has_error = False
        self.runtime_states = {component.id: WorkflowComponentRuntimeState(component.id, state)
                               for component in self._designer.get_workflow_components()}

    @staticmethod
    def create_component_id(session, suffix=None):
        """Build a per-user (and optionally per-suffix) InstanceManager id."""
        prefix = f"{WORKFLOW_PLAYER_INSTANCE_ID}{session['user_id']}"
        if suffix is None:
            suffix = get_unique_id()

        return make_safe_id(f"{prefix}{suffix}")

    @staticmethod
    def _get_global_error_as_str(error, prefix=""):
        # WorkflowsPlayerError (and engine errors carrying component_id) get
        # the component mentioned; anything else is stringified as-is
        if hasattr(error, "component_id"):
            return f"{prefix}component '{error.component_id}': {error.error}"
        else:
            return str(error)
||||
132
src/components/workflows/components/Workflows.py
Normal file
132
src/components/workflows/components/Workflows.py
Normal file
@@ -0,0 +1,132 @@
|
||||
import logging
|
||||
|
||||
from fasthtml.components import *
|
||||
|
||||
from assets.icons import icon_add_regular
|
||||
from components.BaseComponent import BaseComponentSingleton
|
||||
from components.form.components.MyForm import MyForm, FormField
|
||||
from components.workflows.commands import WorkflowsCommandManager
|
||||
from components.workflows.components.WorkflowDesigner import WorkflowDesigner
|
||||
from components.workflows.constants import WORKFLOWS_INSTANCE_ID
|
||||
from components.workflows.db_management import WorkflowsDbManager, WorkflowsDesignerSettings
|
||||
from components_helpers import mk_ellipsis, mk_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
|
||||
# Module-level logger for the Workflows sidebar component.
logger = logging.getLogger("Workflows")
||||
class Workflows(BaseComponentSingleton):
    """
    Sidebar section listing the user's workflows. Handles creating a new
    workflow (through a form opened in a tab) and opening/activating one
    designer tab per workflow.
    """
    COMPONENT_INSTANCE_ID = WORKFLOWS_INSTANCE_ID

    def __init__(self, session, _id, settings_manager=None, tabs_manager=None):
        super().__init__(session, _id, settings_manager, tabs_manager)
        self.commands = WorkflowsCommandManager(self)
        self.db = WorkflowsDbManager(session, settings_manager)

    def request_new_workflow(self):
        """Open a new tab containing the 'Add Workflow' form and return the tabs manager for re-render."""
        # request for a new tab_id
        new_tab_id = self.tabs_manager.request_new_tab_id()

        # create a new form to ask for the details of the new workflow
        add_workflow_form = self._mk_add_workflow_form(new_tab_id)

        # create and display the form in a new tab
        self.tabs_manager.add_tab("Add Workflow", add_workflow_form, tab_id=new_tab_id)
        return self.tabs_manager

    def add_new_workflow(self, tab_id: str, form_id: str, workflow_name: str, tab_boundaries: dict):
        """
        Persist a new workflow and replace the form tab with its designer.

        :param tab_id: tab id where the table content will be displayed (and where the form was displayed)
        :param form_id: form used to give the repository name (to be used in case of error)
        :param workflow_name: new workflow name
        :param tab_boundaries: tab boundaries
        :return: the components to re-render (workflow list and/or tabs)
        """
        try:
            # Add the new workflow name to the persisted list
            self.db.add_workflow(workflow_name)

            # update the tab content with table content
            self.tabs_manager.set_tab_content(tab_id,
                                              self._get_workflow_designer(workflow_name, tab_boundaries),
                                              title=workflow_name,
                                              key=f"{self._id}_{workflow_name}",
                                              active=True)

            return self._mk_workflows(), self.tabs_manager.refresh()

        except ValueError as ex:
            # NOTE(review): add_workflow also raises ValueError for an empty
            # name — this log message assumes the duplicate-name case.
            logger.error(f" Workflow '{workflow_name}' already exists.")
            # (variable name is a leftover from the repositories component)
            add_repository_form = InstanceManager.get(self._session, form_id)
            add_repository_form.set_error(ex)

            return self.tabs_manager.refresh()

    def show_workflow(self, workflow_name: str, tab_boundaries: dict):
        """Open (or re-activate) the designer tab for the given workflow and mark it selected."""
        tab_key = f"{self._id}_{workflow_name}"
        if tab_key not in self.tabs_manager.tabs:
            self.tabs_manager.add_tab(workflow_name,
                                      self._get_workflow_designer(workflow_name, tab_boundaries),
                                      key=tab_key)
        else:
            # tab already exists: just resize its designer to the current tab area
            workflow_designer = self.tabs_manager.get_tab_content_by_key(tab_key)
            workflow_designer.set_boundaries(tab_boundaries)

        self.tabs_manager.select_tab_by_key(tab_key)
        self.db.select_workflow(workflow_name)
        return self.refresh(), self.tabs_manager.refresh()

    def refresh(self):
        # out-of-band refresh of the workflow list only
        return self._mk_workflows(True)

    def __ft__(self):
        return Div(
            Div(cls="divider"),
            Div(
                mk_ellipsis("Workflows", cls="text-sm font-medium mb-1"),
                mk_icon(icon_add_regular,
                        size=16,
                        tooltip="Add Workflow",
                        cls="ml-2 mmt-visible-on-hover",
                        **self.commands.request_add_workflow()),
                cls="flex"
            ),
            self._mk_workflows(),
            id=f"{self._id}"
        )

    def _get_workflow_designer(self, workflow_name: str, tab_boundaries: dict):
        # one designer instance per workflow, shared through the InstanceManager
        return InstanceManager.get(self._session,
                                   WorkflowDesigner.create_component_id(self._session, workflow_name),
                                   WorkflowDesigner,
                                   settings_manager=self._settings_manager,
                                   tabs_manager=self.tabs_manager,
                                   key=workflow_name,
                                   designer_settings=WorkflowsDesignerSettings(workflow_name=workflow_name),
                                   boundaries=tab_boundaries)

    def _mk_add_workflow_form(self, tab_id: str):
        # single-field form; submission routes to commands.add_workflow
        return InstanceManager.get(self._session, MyForm.create_component_id(self._session), MyForm,
                                   title="Add Workflow",
                                   fields=[FormField("name", 'Workflow Name', 'input')],
                                   htmx_request=self.commands.add_workflow(tab_id),
                                   )

    def _mk_workflow(self, workflow_name: str, selected: bool):
        # one clickable list entry; the selected one gets a highlight wrapper
        elt = mk_ellipsis(workflow_name, cls="text-sm", **self.commands.show_workflow(workflow_name))
        if selected:
            return Div(
                elt,
                cls="items-center mmt-selected"
            )
        else:
            return elt

    def _mk_workflows(self, oob=False):
        # the workflow list; oob=True emits it as an htmx out-of-band swap
        return Div(
            *[self._mk_workflow(workflow_name, workflow_name == self.db.get_selected_workflow())
              for workflow_name in self.db.get_workflows()],
            id=f"w_{self._id}",
            hx_swap_oob="true" if oob else None,
        )
||||
0
src/components/workflows/components/__init__.py
Normal file
0
src/components/workflows/components/__init__.py
Normal file
64
src/components/workflows/constants.py
Normal file
64
src/components/workflows/constants.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# InstanceManager ids for the workflow components
WORKFLOWS_INSTANCE_ID = "__Workflows__"
WORKFLOW_DESIGNER_INSTANCE_ID = "__WorkflowDesigner__"
WORKFLOW_PLAYER_INSTANCE_ID = "__WorkflowPlayer__"
# Settings-store entry names (see db_management)
WORKFLOWS_DB_ENTRY = "Workflows"
WORKFLOW_DESIGNER_DB_ENTRY = "WorkflowDesigner"
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY = "Settings"
WORKFLOW_DESIGNER_DB_STATE_ENTRY = "State"
||||
class ProcessorTypes:
    """String identifiers for the three kinds of workflow processors."""
    Producer = "producer"
    Filter = "filter"
    Presenter = "presenter"
||||
# Display metadata per processor type (palette/badge rendering):
# title, description, emoji icon and Tailwind/DaisyUI color classes.
COMPONENT_TYPES = {
    ProcessorTypes.Producer: {
        "title": "Data Producer",
        "description": "Generates or loads data",
        "icon": "📊",
        "color": "bg-green-100 border-green-300 text-neutral"
    },
    ProcessorTypes.Filter: {
        "title": "Data Filter",
        "description": "Filters and transforms data",
        "icon": "🔍",
        "color": "bg-blue-100 border-blue-300 text-neutral"
    },
    ProcessorTypes.Presenter: {
        "title": "Data Presenter",
        "description": "Displays or exports data",
        "icon": "📋",
        "color": "bg-purple-100 border-purple-300 text-neutral"
    }
}
||||
# Processor names selectable for each processor type
PROCESSOR_TYPES = {
    ProcessorTypes.Producer: ["Repository", "Jira"],
    ProcessorTypes.Filter: ["Default"],
    ProcessorTypes.Presenter: ["Default"]}

# URL prefix for all workflow routes
ROUTE_ROOT = "/workflows"
|
||||
class Routes:
    """Route suffixes (appended to ROUTE_ROOT) for the workflow endpoints."""
    AddWorkflow = "/add-workflow"
    SelectWorkflow = "/select-workflow"
    ShowWorkflow = "/show-workflow"
    SelectComponent = "/select-component"
    AddComponent = "/add-component"
    MoveComponent = "/move-component"
    DeleteComponent = "/delete-component"
    AddConnection = "/add-connection"
    DeleteConnection = "/delete-connection"
    ResizeDesigner = "/resize-designer"
    UpdatePropertiesLayout = "/update-properties-layout"
    SaveProperties = "/save-properties"
    CancelProperties = "/cancel-properties"
    SelectProcessor = "/select-processor"
    OnProcessorDetailsEvent = "/on-processor-details-event"
    PlayWorkflow = "/play-workflow"
    PauseWorkflow = "/pause-workflow"
    StopWorkflow = "/stop-workflow"
    Refresh = "/refresh"
209
src/components/workflows/db_management.py
Normal file
209
src/components/workflows/db_management.py
Normal file
@@ -0,0 +1,209 @@
|
||||
import enum
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from components.workflows.constants import WORKFLOWS_DB_ENTRY, WORKFLOW_DESIGNER_DB_ENTRY, \
|
||||
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY, WORKFLOW_DESIGNER_DB_STATE_ENTRY
|
||||
from core.settings_management import SettingsManager
|
||||
from core.utils import make_safe_id
|
||||
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
|
||||
|
||||
# Module-level logger for workflow settings/persistence code.
logger = logging.getLogger("WorkflowsSettings")
||||
class ComponentState(enum.Enum):
    """
    Represents the execution state of a workflow component.
    """
    SUCCESS = "success"   # ran and completed without error
    FAILURE = "failure"   # raised / was attributed an error
    NOT_RUN = "not_run"   # not executed, e.g. left pending after an upstream failure
||||
# Data structures
@dataclass
class WorkflowComponent:
    """A node placed on the designer canvas."""
    id: str            # unique component id
    type: str          # one of the ProcessorTypes values
    x: int             # canvas position
    y: int
    title: str         # display title
    description: str
    properties: dict   # processor-specific settings (processor_name, filter, ...)
||||
@dataclass
class Connection:
    """A directed edge between two components (data flows from_id -> to_id)."""
    id: str
    from_id: str   # upstream component id
    to_id: str     # downstream component id
||||
@dataclass
class WorkflowComponentRuntimeState:
    """
    Represents the runtime state of a single workflow component.
    """
    id: str                                            # component id this state belongs to
    state: ComponentState = ComponentState.SUCCESS     # last known execution state
    error_message: str | None = None                   # set when state is FAILURE
||||
@dataclass
class WorkflowsDesignerSettings:
    """Per-workflow designer settings (currently just the workflow name)."""
    workflow_name: str = "No Name"
||||
@dataclass
class WorkflowsDesignerState:
    """Persisted designer state for one workflow: graph content plus layout sizes."""
    components: dict[str, WorkflowComponent] = field(default_factory=dict)   # keyed by component id
    connections: list[Connection] = field(default_factory=list)
    component_counter: int = 0            # running counter (presumably for generating ids — TODO confirm)
    designer_height: int = 230            # designer area height
    properties_input_width: int | None = None        # panel column widths; None = use defaults
    properties_properties_width: int | None = None
    properties_output_width: int | None = None
    selected_component_id: str | None = None
||||
@dataclass
class WorkflowsSettings:
    """Top-level workflows settings: known workflow names and the current selection."""
    workflows: list[str] = field(default_factory=list)
    selected_workflow: str | None = None
||||
class WorkflowsDbManager:
    """
    Persistence for the workflows list (names + current selection), stored
    through the SettingsManager under WORKFLOWS_DB_ENTRY.
    """

    def __init__(self, session: dict, settings_manager: SettingsManager):
        self.session = session
        self.settings_manager = settings_manager

    def add_workflow(self, workflow_name: str):
        """
        Register a new workflow name.

        :param workflow_name: name of the workflow to add
        :raises ValueError: if the name is empty or already exists
        :return: True on success
        """
        # validate the cheap precondition before touching the settings store
        if not workflow_name:
            raise ValueError("Workflow name cannot be empty.")

        settings = self._get_settings()
        if workflow_name in settings.workflows:
            raise ValueError(f"Workflow '{workflow_name}' already exists.")

        settings.workflows.append(workflow_name)
        self.settings_manager.save(self.session, WORKFLOWS_DB_ENTRY, settings)
        return True

    def get_workflow(self, workflow_name: str):
        """
        Validate that a workflow exists and return its name.

        :raises ValueError: if the name is empty or unknown
        """
        if not workflow_name:
            raise ValueError("Workflow name cannot be empty.")

        settings = self._get_settings()
        if workflow_name not in settings.workflows:
            raise ValueError(f"Workflow '{workflow_name}' does not exist.")

        # workflows are stored as plain names, so the validated name *is* the
        # workflow (the previous next(filter(...)) re-scan returned the same value)
        return workflow_name

    # NOTE: modify/rename support is not implemented yet; workflows are plain
    # strings, so renaming would only require replacing the list entry.

    def remove_workflow(self, workflow_name):
        """
        Remove a workflow name.

        :raises ValueError: if the name is empty or unknown
        :return: True on success
        """
        if not workflow_name:
            raise ValueError("Workflow name cannot be empty.")

        settings = self._get_settings()
        if workflow_name not in settings.workflows:
            raise ValueError(f"workflow '{workflow_name}' does not exist.")

        settings.workflows.remove(workflow_name)
        self.settings_manager.save(self.session, WORKFLOWS_DB_ENTRY, settings)
        return True

    def exists_workflow(self, workflow_name):
        """Return True when the given name is a known workflow."""
        if not workflow_name:
            raise ValueError("workflow name cannot be empty.")

        return workflow_name in self._get_settings().workflows

    def get_workflows(self):
        """Return the list of known workflow names."""
        return self._get_settings().workflows

    def select_workflow(self, workflow_name: str):
        """
        Select and save the specified workflow name in the current session's settings.

        :param workflow_name: The name of the workflow to be selected and stored.
        :type workflow_name: str
        :return: None
        """
        settings = self._get_settings()
        settings.selected_workflow = workflow_name
        self.settings_manager.save(self.session, WORKFLOWS_DB_ENTRY, settings)

    def get_selected_workflow(self):
        """Return the currently selected workflow name (or None)."""
        return self._get_settings().selected_workflow

    def _get_settings(self) -> WorkflowsSettings:
        # load-or-default: the store returns a fresh WorkflowsSettings when absent
        return self.settings_manager.load(self.session, WORKFLOWS_DB_ENTRY, default=WorkflowsSettings())
||||
class WorkflowsDesignerDbManager:
    """
    Persistence for one designer's settings and state. Each workflow gets its
    own DB entry (derived from the key); settings and state live under
    separate sub-entries. State saves can also feed the undo/redo history.
    """

    def __init__(self, session: dict, settings_manager: SettingsManager):
        self._session = session
        self._settings_manager = settings_manager
        self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)

    @staticmethod
    def _get_db_entry(key):
        # per-workflow entry name, sanitized for use as an id
        return make_safe_id(f"{WORKFLOW_DESIGNER_DB_ENTRY}_{key}")

    def save_settings(self, key: str, settings: WorkflowsDesignerSettings):
        """Persist the designer settings for the given workflow key."""
        self._settings_manager.put(self._session,
                                   self._get_db_entry(key),
                                   WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY,
                                   settings)

    def save_state(self, key: str, state: WorkflowsDesignerState, undo_redo_attrs: UndoRedoAttrs = None):
        """
        Persist the designer state; when undo_redo_attrs is given, also record
        an undo/redo snapshot of the saved entry.
        """
        db_entry = self._get_db_entry(key)
        self._settings_manager.put(self._session,
                                   db_entry,
                                   WORKFLOW_DESIGNER_DB_STATE_ENTRY,
                                   state)

        if undo_redo_attrs is not None:
            self._undo_redo.snapshot(undo_redo_attrs,
                                     db_entry,
                                     WORKFLOW_DESIGNER_DB_STATE_ENTRY)

    def save_all(self, key: str, settings: WorkflowsDesignerSettings = None, state: WorkflowsDesignerState = None):
        """Persist settings and/or state in a single put_many call (no undo snapshot)."""
        items = {}
        if settings is not None:
            items[WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY] = settings
        if state is not None:
            items[WORKFLOW_DESIGNER_DB_STATE_ENTRY] = state

        self._settings_manager.put_many(self._session, self._get_db_entry(key), items)

    def load_settings(self, key) -> WorkflowsDesignerSettings:
        # load-or-default
        return self._settings_manager.get(self._session,
                                          self._get_db_entry(key),
                                          WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY,
                                          default=WorkflowsDesignerSettings())

    def load_state(self, key) -> WorkflowsDesignerState:
        # load-or-default
        return self._settings_manager.get(self._session,
                                          self._get_db_entry(key),
                                          WORKFLOW_DESIGNER_DB_STATE_ENTRY,
                                          default=WorkflowsDesignerState())
||||
@@ -3,9 +3,10 @@ from fasthtml.components import *
|
||||
from core.utils import merge_classes
|
||||
|
||||
|
||||
def mk_icon(icon, size=20, can_select=True, cls='', tooltip=None, **kwargs):
|
||||
def mk_icon(icon, size=20, can_select=True, can_hover=False, cls='', tooltip=None, **kwargs):
|
||||
merged_cls = merge_classes(f"icon-{size}",
|
||||
'icon-btn' if can_select else '',
|
||||
'mmt-btn' if can_hover else '',
|
||||
cls,
|
||||
kwargs)
|
||||
return mk_tooltip(icon, tooltip, cls=merged_cls, **kwargs) if tooltip else Div(icon, cls=merged_cls, **kwargs)
|
||||
@@ -81,7 +82,7 @@ def mk_accordion_section(component_id, title, icon, content, selected=False):
|
||||
)
|
||||
|
||||
|
||||
def set_boundaries(boundaries, remove_margin=True, other=0):
|
||||
def apply_boundaries(boundaries, remove_margin=True, other=0):
|
||||
if isinstance(boundaries, int):
|
||||
max_height = boundaries
|
||||
else:
|
||||
|
||||
73
src/core/Expando.py
Normal file
73
src/core/Expando.py
Normal file
@@ -0,0 +1,73 @@
|
||||
class Expando:
    """
    Readonly-style dynamic wrapper that eases access to attributes and sub-attributes.

    It is initialized with a dict; nested dicts are wrapped on access so
    properties can be reached with dot notation (ex. obj.prop1.prop2).
    """

    def __init__(self, props):
        # Underlying dict; nested dicts are wrapped lazily in __getattr__.
        self._props = props

    def __getattr__(self, item):
        # Only called when normal attribute lookup fails, so '_props'
        # itself never recurses here once __init__ has run.
        if item not in self._props:
            raise AttributeError(item)

        current = self._props[item]
        return Expando(current) if isinstance(current, dict) else current

    def __setitem__(self, key, value):
        self._props[key] = value

    def get(self, path):
        """
        Return the value found by following *path*, a dot-separated string.

        When an intermediate value is a list, the remaining attribute is
        applied to every element and the list of matching values is kept.
        Returns None when the path cannot be resolved.

        :param path: dot-separated property path, e.g. "fields.status.name"
        :return: the resolved value, a list of values, or None
        """
        current = self._props
        for attr in path.split("."):
            if isinstance(current, list):
                temp = []
                for value in current:
                    if value and attr in value:
                        temp.append(value[attr])
                current = temp

            else:
                if current is None or attr not in current:
                    return None
                current = current[attr]

        return current

    def as_dict(self):
        """
        Return the information as a (shallow) copy of the underlying dictionary.
        :return: dict
        """
        return self._props.copy()

    def to_dict(self, mappings: dict) -> dict:
        """
        Project the wrapped data into a flat dict.

        :param mappings: {source_path: target_name}; entries whose target
                         name is None are skipped
        :return: {target_name: resolved value}
        """
        return {prop_name: self.get(path) for path, prop_name in mappings.items() if prop_name is not None}

    def __hasattr__(self, item):
        # NOTE(review): '__hasattr__' is not a Python protocol method —
        # builtin hasattr() relies on __getattr__ above. Kept as-is for
        # compatibility with any direct callers.
        return item in self._props

    def __repr__(self):
        if "key" in self._props:
            # Bug fix: nested double quotes inside an f-string
            # (f"...{self._props["key"]}...") are a SyntaxError before
            # Python 3.12 (PEP 701); use single-quoted subscript.
            return f"Expando(key={self._props['key']})"

        props_as_str = str(self._props)
        if len(props_as_str) > 50:
            props_as_str = props_as_str[:50] + "..."

        return f"Expando({props_as_str})"

    def __eq__(self, other):
        if not isinstance(other, Expando):
            return False

        return self._props == other._props

    def __hash__(self):
        # NOTE(review): raises TypeError when any value is unhashable
        # (e.g. a nested dict/list) — same behavior as the original.
        return hash(tuple(sorted(self._props.items())))
|
||||
@@ -271,6 +271,42 @@ class DbEngine:
|
||||
except KeyError:
|
||||
raise DbException(f"Key '{key}' not found in entry '{entry}'")
|
||||
|
||||
def history(self, user_id, entry, digest=None, max_items=1000):
    """
    Give the current digest and all its ancestors, newest first.

    :param user_id: owner of the entry
    :param entry: entry whose history is walked
    :param digest: starting digest; defaults to the entry's current digest
    :param max_items: hard cap on the number of digests returned
    :return: list of digests (possibly empty)
    """
    with self.lock:
        logger.info(f"History for {user_id=}, {entry=}, {digest=}")

        digest_to_use = digest or self._get_entry_digest(user_id, entry)
        logger.debug(f"Using digest {digest_to_use}.")

        history = []

        while digest_to_use is not None and len(history) < max_items:
            history.append(digest_to_use)

            try:
                target_file = self._get_obj_path(user_id, digest_to_use)
                with open(target_file, 'r', encoding='utf-8') as file:
                    as_dict = json.load(file)

                digest_to_use = as_dict[TAG_PARENT][0]
            except (FileNotFoundError, KeyError, IndexError):
                # Missing object file, or a root object without a parent
                # tag: the walk ends here. (The original only caught
                # FileNotFoundError and crashed on parent-less roots.)
                break

        return history
|
||||
|
||||
def debug_root(self):
|
||||
"""
|
||||
Lists all folders in the root directory
|
||||
@@ -312,7 +348,7 @@ class DbEngine:
|
||||
return []
|
||||
return [f for f in os.listdir(self.root) if os.path.isdir(os.path.join(self.root, f)) and f != 'refs']
|
||||
|
||||
def debug_get_digest(self, user_id, entry):
|
||||
def get_digest(self, user_id, entry):
|
||||
return self._get_entry_digest(user_id, entry)
|
||||
|
||||
def _serialize(self, obj):
|
||||
|
||||
76
src/core/fasthtml_helper.py
Normal file
76
src/core/fasthtml_helper.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from fastcore.basics import NotStr
|
||||
|
||||
from core.utils import merge_classes
|
||||
|
||||
# Kwarg names that must be translated to reserved HTML attribute names.
attr_map = {
    "cls": "class",
    "_id": "id",
}


def safe_attr(attr_name):
    """Map a Python-friendly kwarg name to its HTML attribute name.

    Every 'hx_' / 'data_' fragment becomes 'hx-' / 'data-', then the
    reserved-name table above is consulted ('cls' -> 'class', '_id' -> 'id').
    """
    normalized = attr_name.replace("hx_", "hx-").replace("data_", "data-")
    return attr_map.get(normalized, normalized)
|
||||
|
||||
|
||||
def to_html(item):
    """Render *item* to an HTML string.

    Supported inputs: None (empty string), plain strings, numbers and
    booleans, MyFt components, and fastcore NotStr wrappers.
    Anything else raises.
    """
    if item is None:
        return ""
    if isinstance(item, str):
        return item
    if isinstance(item, (int, float, bool)):
        return str(item)
    if isinstance(item, MyFt):
        return item.to_html()
    if isinstance(item, NotStr):
        return str(item)
    raise Exception(f"Unsupported type: {type(item)}, {item=}")
|
||||
|
||||
|
||||
class MyFt:
    """Minimal FastHTML-like element: a tag name, children, and HTML attributes."""

    def __init__(self, tag, *args, **kwargs):
        self.tag = tag
        self.children = args
        # kwargs are normalized to real HTML attribute names (cls -> class, ...).
        self.attrs = {safe_attr(k): v for k, v in kwargs.items()}

    def to_html(self):
        """Render the element and its children to an HTML string."""
        body_items = [to_html(item) for item in self.children]
        rendered_attrs = ' '.join(f'{k}="{v}"' for k, v in self.attrs.items())
        # Bug fix: the closing tag was hard-coded to '</div>', producing
        # invalid HTML for any other tag (e.g. MySpan).
        return f"<{self.tag} {rendered_attrs}>{' '.join(body_items)}</{self.tag}>"

    def __ft__(self):
        # Lets FastHTML embed the pre-rendered HTML verbatim.
        return NotStr(self.to_html())
|
||||
|
||||
|
||||
class MyDiv(MyFt):
    """MyFt specialized to the <div> tag."""

    def __init__(self, *children, **attrs):
        super().__init__("div", *children, **attrs)
|
||||
|
||||
|
||||
class MySpan(MyFt):
    """MyFt specialized to the <span> tag."""

    def __init__(self, *children, **attrs):
        super().__init__("span", *children, **attrs)
|
||||
|
||||
|
||||
def mk_my_ellipsis(txt: str, cls='', **kwargs):
    """Build a truncating div that exposes the full text as a tooltip."""
    classes = merge_classes("truncate", cls, kwargs)
    return MyDiv(txt, cls=classes, data_tooltip=txt, **kwargs)
|
||||
|
||||
|
||||
def mk_my_icon(icon, size=20, can_select=True, can_hover=False, cls='', tooltip=None, **kwargs):
    """Build an icon element, optionally selectable/hoverable, optionally tooltipped."""
    classes = merge_classes(
        f"icon-{size}",
        'icon-btn' if can_select else '',
        'mmt-btn' if can_hover else '',
        cls,
        kwargs,
    )
    if tooltip:
        return mk_my_tooltip(icon, tooltip, cls=classes, **kwargs)
    return MyDiv(icon, cls=classes, **kwargs)
|
||||
|
||||
|
||||
def mk_my_tooltip(element, tooltip: str, cls='', **kwargs):
    """Wrap *element* in a div carrying a CSS tooltip."""
    classes = merge_classes("mmt-tooltip", cls, kwargs)
    return MyDiv(element, cls=classes, data_tooltip=tooltip, **kwargs)
|
||||
296
src/core/jira.py
Normal file
296
src/core/jira.py
Normal file
@@ -0,0 +1,296 @@
|
||||
import json
|
||||
import logging
|
||||
from enum import Enum
|
||||
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from core.Expando import Expando
|
||||
|
||||
# Base URL of the JIRA Cloud REST API (v3).
JIRA_ROOT = "https://altares.atlassian.net/rest/api/3"
# Headers sent with every request.
DEFAULT_HEADERS = {"Accept": "application/json"}
# Fields requested by search() when the caller does not override them.
DEFAULT_SEARCH_FIELDS = "summary,status,assignee"
logger = logging.getLogger("Jira")
|
||||
|
||||
|
||||
class NotFound(Exception):
    """Raised when a JIRA resource (project, version, ...) cannot be found."""
    pass
|
||||
|
||||
|
||||
class JiraRequestTypes(Enum):
    """Kinds of requests this module knows how to issue against JIRA."""
    Search = "search"
    Issue = "issue"
    Comments = "comments"
    Versions = "versions"
|
||||
|
||||
|
||||
class Jira:
    """Manage default operations against the JIRA REST API (v3)."""

    def __init__(self, user_name: str, api_token: str, fields=DEFAULT_SEARCH_FIELDS):
        """
        Prepare a connection to JIRA.

        The initialisation does not contact the server; it only stores the
        user name and the API token. Basic auth with an API token is the
        recommended way to connect, therefore the only one supported here.

        :param user_name: Atlassian account name (e-mail)
        :param api_token: Atlassian API token
        :param fields: default comma-separated field list used by search()
        """
        self.user_name = user_name
        self.api_token = api_token
        self.auth = HTTPBasicAuth(self.user_name, self.api_token)
        # NOTE(review): this instance attribute shadows the fields() method
        # defined below, making that method unreachable on instances — see
        # get_all_fields() for the same data.
        self.fields = fields

    def test(self):
        """
        Call the /myself endpoint to verify the stored credentials.

        :return: the raw requests.Response (no status check is performed)
        """
        logger.debug(f"test with no parameters")

        url = f"{JIRA_ROOT}/myself"
        logger.debug(f" url: {url}")

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )
        logger.debug(f" response: {response}")
        logger.debug(f" response.text: {response.text}")

        return response

    def issue(self, issue_id: str) -> list[Expando]:
        """
        Retrieve a single issue.

        :param issue_id: JIRA issue key, e.g. "PROJ-123"
        :return: a one-element list wrapping the response payload
        """
        # Bug fix: the log label said "comments with" (copy-paste).
        logger.debug(f"issue with {issue_id=}")

        url = f"{JIRA_ROOT}/issue/{issue_id}"
        logger.debug(f" url: {url}")

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )
        logger.debug(f" response: {response}")
        logger.debug(f" response.text: {response.text}")

        # NOTE(review): unlike search()/comments(), no status-code check is
        # done here; an error payload would be wrapped and returned as-is.
        return [Expando(json.loads(response.text))]

    def fields(self) -> list[Expando]:
        """
        Retrieve the list of all fields for an issue.

        NOTE(review): unreachable on instances — __init__ stores the default
        search-field string under self.fields, shadowing this method. Use
        get_all_fields() instead.
        :return: list of field descriptors
        """
        url = f"{JIRA_ROOT}/field"

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        as_dict = json.loads(response.text)
        return [Expando(field) for field in as_dict]

    def search(self, jql: str, fields=None) -> list[Expando]:
        """
        Execute a JQL query and return the list of matching issues.

        Pagination is handled transparently: pages are requested until
        startAt + maxResults reaches the reported total.

        :param jql: JQL query string (must not be empty)
        :param fields: comma-separated list of fields to retrieve; defaults
                       to the instance's field list
        :return: issues wrapped in Expando
        :raises ValueError: when jql is empty
        :raises Exception: when the server answers with a non-200 status
        """
        logger.debug(f"search with {jql=}, {fields=}")

        if not jql:
            raise ValueError("Jql cannot be empty.")

        if not fields:
            fields = self.fields

        url = f"{JIRA_ROOT}/search"
        logger.debug(f" url: {url}")

        headers = DEFAULT_HEADERS.copy()
        headers["Content-Type"] = "application/json"

        payload = {
            "fields": [f.strip() for f in fields.split(",")],
            "fieldsByKeys": False,
            "jql": jql,
            "maxResults": 500,  # Does not seem to be used. It's always 100 !
            "startAt": 0
        }
        logger.debug(f" payload: {payload}")

        result = []
        while True:
            logger.debug(f" Request startAt '{payload['startAt']}'")
            response = requests.request("POST",
                                        url,
                                        data=json.dumps(payload),
                                        headers=headers,
                                        auth=self.auth)
            logger.debug(f" response: {response}")
            logger.debug(f" response.text: {response.text}")

            if response.status_code != 200:
                raise Exception(self._format_error(response))

            as_dict = json.loads(response.text)
            result += as_dict["issues"]

            if as_dict["startAt"] + as_dict["maxResults"] >= as_dict["total"]:
                logger.debug(f" response: {response}")
                # We retrieved at least the total number of items.
                break

            payload["startAt"] += as_dict["maxResults"]

        return [Expando(issue) for issue in result]

    def comments(self, issue_id: str) -> list[Expando]:
        """
        Retrieve the list of comments for an issue.

        :param issue_id: JIRA issue key
        :return: comments wrapped in Expando
        :raises Exception: when the server answers with a non-200 status
        """
        logger.debug(f"comments with {issue_id=}")

        url = f"{JIRA_ROOT}/issue/{issue_id}/comment"
        logger.debug(f" url: {url}")

        response = requests.request("GET",
                                    url,
                                    headers=DEFAULT_HEADERS,
                                    auth=self.auth)
        logger.debug(f" response: {response}")
        logger.debug(f" response.text: {response.text}")

        if response.status_code != 200:
            raise Exception(self._format_error(response))

        as_dict = json.loads(response.text)
        result = as_dict["comments"]
        return [Expando(issue) for issue in result]

    def versions(self, project_key):
        """
        Retrieve all versions (fixVersions) declared for a project.

        :param project_key: JIRA project key
        :return: versions wrapped in Expando
        :raises NotFound: when the server answers with a non-200 status
        """
        logger.debug(f"versions with {project_key=}")

        url = f"{JIRA_ROOT}/project/{project_key}/versions"
        logger.debug(f" url: {url}")

        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        logger.debug(f" response: {response}")
        logger.debug(f" response.text: {response.text}")

        if response.status_code != 200:
            raise NotFound()

        as_list = json.loads(response.text)
        return [Expando(version) for version in as_list]

    def extract(self, jql, mappings, updates=None) -> list[dict]:
        """
        Execute a jql and yield one flat dict per issue.

        The issue objects returned by search() contain every requested Jira
        field; this generator projects each issue onto the requested mapping.

        :param jql: JQL query string
        :param mappings: {jira_path: output column name}; entries whose
                         column name is None are skipped
        :param updates: list of updates (callables on issue) to perform
        :return: generator of dicts
        """
        logger.debug(f"Processing extract using mapping {mappings}")

        def _get_field(mapping):
            """Returns the meaningful jira field, for the mapping description path"""
            fields = mapping.split(".")
            return fields[1] if len(fields) > 1 and fields[0] == "fields" else fields[0]

        # retrieve the list of requested fields from what was asked in the mapping
        jira_fields = [_get_field(mapping) for mapping in mappings]
        as_string = ", ".join(jira_fields)
        # Bug fix: the original called self.issues(), which does not exist;
        # search() is the method that runs a JQL and returns issues.
        issues = self.search(jql, as_string)

        for issue in issues:
            # apply updates if needed
            if updates:
                for update in updates:
                    update(issue)

            row = {cvs_col: issue.get(jira_path) for jira_path, cvs_col in mappings.items() if cvs_col is not None}
            yield row

    def get_version(self, project_key, version_name):
        """
        Given a project key and a version name, return the matching
        fixVersion in JIRA.

        :param project_key: JIRA project key
        :param version_name: human-readable version name
        :return: the matching version (Expando)
        :raises NotFound: when no version carries that name
        """
        for version in self.versions(project_key):
            if version.name == version_name:
                return version

        raise NotFound()

    def get_all_fields(self):
        """
        Helper function that returns the list of all fields that can be
        requested in an issue.

        :return: field descriptors wrapped in Expando
        """
        url = f"{JIRA_ROOT}/field"
        response = requests.request(
            "GET",
            url,
            headers=DEFAULT_HEADERS,
            auth=self.auth
        )

        as_dict = json.loads(response.text)
        return [Expando(issue) for issue in as_dict]

    @staticmethod
    def update_customer_refs(issue: Expando, bug_only=True, link_name=None):
        """
        Collect related ITSUP issue keys into issue['ticket_customer_refs'].

        :param issue: issue to enrich (mutated in place)
        :param bug_only: when True, only issues of type 'Bug' are processed
        :param link_name: optional restriction on the issue-link type name(s)
        """
        issue["ticket_customer_refs"] = []
        if bug_only and issue.fields.issuetype.name != "Bug":
            return

        for issue_link in issue.fields.issuelinks:
            if link_name and issue_link["type"]["name"] not in link_name:
                continue

            # A link stores its peer either as inward or outward issue.
            direction = "inwardIssue" if "inwardIssue" in issue_link else "outwardIssue"
            related_issue_key = issue_link[direction]["key"]
            if related_issue_key.startswith("ITSUP"):
                issue.ticket_customer_refs.append(related_issue_key)

    @staticmethod
    def _format_error(response):
        """Build a readable error message from a failed HTTP response."""
        if "errorMessages" in response.text:
            error_messages = json.loads(response.text)["errorMessages"]
        else:
            error_messages = response.text
        return f"Error {response.status_code} : {response.reason} : {error_messages}"
|
||||
189
src/core/preprocessor.py
Normal file
189
src/core/preprocessor.py
Normal file
@@ -0,0 +1,189 @@
|
||||
from arpeggio import RegExMatch, ZeroOrMore, OneOrMore, ParserPython, EOF, NoMatch
|
||||
|
||||
|
||||
class VariableParsingError(Exception):
    """Raised when the variable syntax of a template cannot be parsed."""

    def __init__(self, message, position):
        # Keep the raw pieces so callers can point at the offending spot.
        self.message = message
        self.position = position
        details = f"Variable parsing error at position {position}: {message}"
        super().__init__(details)
|
||||
|
||||
|
||||
class VariableProcessingError(Exception):
    """Raised when a parsed variable cannot be resolved during preprocessing.

    (The original docstring said "parsing errors" — a copy-paste from
    VariableParsingError; this exception covers the substitution phase.)
    """

    def __init__(self, message, position):
        self.message = message
        self.position = position
        super().__init__(f"Variable processing error at position {position}: {message}")
|
||||
|
||||
|
||||
def variable_name():
    """Grammar rule: a variable name (letter/underscore, then word chars)."""
    pattern = r'[a-zA-Z_][a-zA-Z0-9_]*'
    return RegExMatch(pattern)
|
||||
|
||||
|
||||
def property_name():
    """Grammar rule: a property name (same lexical rules as a variable name)."""
    pattern = r'[a-zA-Z_][a-zA-Z0-9_]*'
    return RegExMatch(pattern)
|
||||
|
||||
|
||||
def variable_property():
    """Grammar rule: one property access, i.e. '.property_name'."""
    return ".", property_name
|
||||
|
||||
|
||||
def variable():
    """Grammar rule: a full variable, '$name' followed by any number of '.prop'."""
    return "$", variable_name, ZeroOrMore(variable_property)
|
||||
|
||||
|
||||
def text_char():
    """Grammar rule: any single character that does not start a variable ('$')."""
    return RegExMatch(r'[^$]')
|
||||
|
||||
|
||||
def text_segment():
    """Grammar rule: a run of one or more non-variable characters."""
    return OneOrMore(text_char)
|
||||
|
||||
|
||||
def element():
    """Grammar rule: ordered choice between a variable and a text segment."""
    return [variable, text_segment]
|
||||
|
||||
|
||||
def expression():
    """Grammar rule: the whole input, a sequence of elements up to EOF."""
    return ZeroOrMore(element), EOF
|
||||
|
||||
|
||||
class PlainTextPreprocessor:
    """Parse '$variable(.property)*' placeholders in plain text and substitute them."""

    def __init__(self):
        # skipws=False: whitespace is significant in plain text.
        self.parser = ParserPython(expression, debug=False, skipws=False)

    @staticmethod
    def _post_validation(elements):
        """Reject two adjacent variables with no separating text."""
        if len(elements) < 2:
            return

        for current, following in zip(elements, elements[1:]):
            if current['type'] == 'variable' and following['type'] == 'variable':
                raise VariableParsingError("Invalid syntax.", following['start'])

    @staticmethod
    def _extract_elements_from_tree(parse_tree, original_text):
        """Extract text/variable elements (with positions) from the parse tree."""
        elements = []

        def process_node(node, current_pos=0):
            nonlocal elements

            if hasattr(node, 'rule_name'):
                if node.rule_name == 'variable':
                    # Slice the variable's source text back out of the input.
                    var_start = node.position
                    var_end = node.position_end
                    var_text = original_text[var_start:var_end]

                    parts = var_text[1:].split('.')  # Remove $ and split by .
                    var_name = parts[0]
                    properties = parts[1:] if len(parts) > 1 else []

                    elements.append({
                        "type": "variable",
                        "name": var_name,
                        "properties": properties,
                        "start": var_start,
                        "end": var_end
                    })

                elif node.rule_name == 'text_segment':
                    text_start = node.position
                    text_end = node.position_end
                    content = original_text[text_start:text_end]

                    # A text segment starting with '.' is a malformed
                    # property access right after a variable.
                    stripped = content.strip()
                    if len(stripped) > 0 and stripped[0] == '.':
                        raise VariableParsingError("Invalid syntax in property name.", text_start)

                    elements.append({
                        "type": "text",
                        "content": content,
                        "start": text_start,
                        "end": text_end
                    })

                elif node.rule_name in ('expression', 'element'):
                    for child in node:
                        process_node(child, current_pos)

            # NOTE(review): '_tx_children' is a textX attribute; Arpeggio
            # parse-tree nodes do not appear to define it, so this branch
            # looks like dead code left from a textX version — confirm.
            if hasattr(node, '_tx_children') and node._tx_children:
                for child in node._tx_children:
                    process_node(child, current_pos)

        process_node(parse_tree)
        return elements

    def parse(self, text):
        """
        Parse text and return structure with text segments and variables with positions

        Returns:
            [
                {"type": "text", "content": "...", "start": int, "end": int},
                {"type": "variable", "name": "...", "properties": [...], "start": int, "end": int}
            ]
        :raises VariableParsingError: on any syntax problem
        """
        if not text:
            return []

        try:
            parse_tree = self.parser.parse(text)

            # Extract elements from parse tree
            elements = self._extract_elements_from_tree(parse_tree, text)

            # Extra validations
            self._post_validation(elements)

            # Sort elements by start position
            elements.sort(key=lambda x: x['start'])

            return elements

        except NoMatch as e:
            # Convert Arpeggio parsing errors to our custom error
            raise VariableParsingError("Invalid syntax", e.position)
        except VariableParsingError:
            # Already our error type: re-raise unchanged (the original used a
            # broad 'except Exception' plus an isinstance check for this).
            raise
        except Exception as e:
            raise VariableParsingError(f"Parsing failed: {str(e)}", 0)

    def preprocess(self, text, namepace):
        """
        Substitute every $variable(.property)* in *text* using *namepace*.

        :param text: template text to process
        :param namepace: object exposing .get(name) for variable lookup
                         (sic — parameter name kept for caller compatibility)
        :return: the text with every variable replaced by str(value)
        :raises VariableProcessingError: for undefined variables/properties
        """
        result = ""
        for element in self.parse(text):
            if element['type'] == 'text':
                result += element['content']
            elif element['type'] == 'variable':
                value = namepace.get(element['name'])
                if value is None:
                    raise VariableProcessingError(f"Variable '{element['name']}' is not defined.", element['start'])

                # Local renamed from 'property_name' which shadowed the
                # module-level grammar rule of the same name.
                prop = None
                try:
                    pos = element['start'] + len(element['name']) + 1  # +1 for the starting '$'
                    for prop in element['properties']:
                        value = getattr(value, prop)
                        pos += len(prop) + 1  # +1 for the dot '.'
                except AttributeError as e:
                    raise VariableProcessingError(f"Invalid property '{prop}' for variable '{element['name']}'.",
                                                  pos) from e

                result += str(value)

        return result
|
||||
@@ -87,7 +87,7 @@ class MemoryDbEngine:
|
||||
obj.update(items)
|
||||
|
||||
def exists(self, user_id: str, entry: str):
|
||||
return user_id in entry and entry in self.db[user_id]
|
||||
return user_id in self.db and entry in self.db[user_id]
|
||||
|
||||
|
||||
class SettingsManager:
|
||||
@@ -98,10 +98,10 @@ class SettingsManager:
|
||||
user_id, user_email = self._get_user(session)
|
||||
return self._db_engine.save(user_id, user_email, entry, obj)
|
||||
|
||||
def load(self, session: dict, entry: str, default=NoDefault):
|
||||
def load(self, session: dict, entry: str, digest=None, default=NoDefault):
|
||||
user_id, _ = self._get_user(session)
|
||||
try:
|
||||
return self._db_engine.load(user_id, entry)
|
||||
return self._db_engine.load(user_id, entry, digest)
|
||||
except DbException:
|
||||
return default
|
||||
|
||||
@@ -128,6 +128,14 @@ class SettingsManager:
|
||||
|
||||
return self._db_engine.exists(user_id, entry)
|
||||
|
||||
def get_digest(self, session: dict, entry: str):
|
||||
user_id, _ = self._get_user(session)
|
||||
return self._db_engine.get_digest(user_id, entry)
|
||||
|
||||
def history(self, session, entry, digest=None, max_items=1000):
|
||||
user_id, _ = self._get_user(session)
|
||||
return self._db_engine.history(user_id, entry, digest, max_items)
|
||||
|
||||
def get_db_engine(self):
|
||||
return self._db_engine
|
||||
|
||||
@@ -177,7 +185,7 @@ class GenericDbManager:
|
||||
if key.startswith("_"):
|
||||
super().__setattr__(key, value)
|
||||
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not (hasattr(settings, key)):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{key}'.")
|
||||
|
||||
@@ -188,7 +196,7 @@ class GenericDbManager:
|
||||
if item.startswith("_"):
|
||||
return super().__getattribute__(item)
|
||||
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not (hasattr(settings, item)):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{item}'.")
|
||||
|
||||
@@ -250,7 +258,7 @@ class NestedSettingsManager:
|
||||
self._settings_manager.save(self._session, self._obj_entry, settings)
|
||||
|
||||
def _get_settings_and_object(self):
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not hasattr(settings, self._obj_attribute):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{self._obj_attribute}'.")
|
||||
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
import ast
|
||||
import base64
|
||||
import cProfile
|
||||
import functools
|
||||
import hashlib
|
||||
import importlib
|
||||
import inspect
|
||||
import pkgutil
|
||||
import re
|
||||
import time
|
||||
import types
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from io import BytesIO
|
||||
from urllib.parse import urlparse
|
||||
@@ -417,3 +422,109 @@ def split_host_port(url):
|
||||
port = None
|
||||
|
||||
return host, port
|
||||
|
||||
|
||||
def timed(func):
    """Decorator that prints the wall-clock duration of each call ([PERF] lines)."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        started = time.perf_counter()
        result = func(*args, **kwargs)
        elapsed = time.perf_counter() - started

        # Try to recover the owning class name for nicer output.
        owner = None
        if args:
            first = args[0]
            if inspect.isclass(first):
                owner = first.__name__  # class method
            elif hasattr(first, "__class__"):
                owner = first.__class__.__name__  # instance method

        if owner:
            print(f"[PERF] {owner}.{func.__name__} took {elapsed:.4f} sec")
        else:
            print(f"[PERF] {func.__name__} took {elapsed:.4f} sec")

        return result

    return wrapper
|
||||
|
||||
|
||||
def profile_function(func):
    """Decorator that profiles each call with cProfile and dumps the stats.

    The stats land in '<Class>_<func>_<timestamp>.prof' (or without the
    class prefix for free functions) in the current working directory.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        profiler = cProfile.Profile()
        try:
            profiler.enable()
            result = func(*args, **kwargs)
        finally:
            # Always stop profiling, even when the wrapped call raises.
            profiler.disable()

        # Determine class name if any
        class_name = None
        if args:
            if inspect.isclass(args[0]):
                class_name = args[0].__name__  # class method
            elif hasattr(args[0], "__class__"):
                class_name = args[0].__class__.__name__  # instance method

        # Compose filename with timestamp
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        if class_name:
            filename = f"{class_name}_{func.__name__}_{timestamp}.prof"
        else:
            filename = f"{func.__name__}_{timestamp}.prof"

        # Dump stats to file
        profiler.dump_stats(filename)
        # Bug fix: the message printed a literal placeholder instead of the
        # generated file name.
        print(f"[PROFILE] Profiling data saved to {filename}")

        return result

    return wrapper
|
||||
|
||||
|
||||
class UnreferencedNamesVisitor(ast.NodeVisitor):
    """
    Collect the symbols an AST will look up at run time.

    These can be variable names but also function names. Some child fields
    are deliberately skipped: for a Call only the arguments and keywords are
    walked (not the callee), and for a For loop only the body and orelse
    (NOTE(review): the iterated expression is not visited either — confirm
    this is intended).
    """

    def __init__(self):
        # Accumulated names, filled in by the visit_* hooks below.
        self.names = set()

    def get_names(self, node):
        """Visit *node* and return the accumulated set of names."""
        self.visit(node)
        return self.names

    def visit_Name(self, node):
        # Every bare identifier counts as a requested symbol.
        self.names.add(node.id)

    def visit_For(self, node: ast.For):
        # Walk only the loop body and the optional else-clause.
        self.visit_selected(node, ["body", "orelse"])

    def visit_selected(self, node, to_visit):
        """Visit only the child fields of *node* named in *to_visit*."""
        for field_name in to_visit:
            child = getattr(node, field_name)
            children = child if isinstance(child, list) else [child]
            for sub_node in children:
                if isinstance(sub_node, ast.AST):
                    self.visit(sub_node)

    def visit_Call(self, node: ast.Call):
        # Walk the arguments only; the callee itself is not recorded.
        self.visit_selected(node, ["args", "keywords"])

    def visit_keyword(self, node: ast.keyword):
        """
        Record keyword-argument names and walk their value expressions.

        Keywords are the name=value arguments of a call; for a '**'
        expansion node.arg is None (and None is then added to the set,
        matching the original behavior).

        :param node: the ast.keyword node being visited
        """
        self.names.add(node.arg)
        self.visit_selected(node, ["value"])
|
||||
@@ -47,4 +47,9 @@ loggers:
|
||||
AddStuffApp:
|
||||
level: INFO
|
||||
handlers: [ console ]
|
||||
propagate: False
|
||||
|
||||
Jira:
|
||||
level: DEBUG
|
||||
handlers: [ console ]
|
||||
propagate: False
|
||||
83
src/main.py
83
src/main.py
@@ -1,6 +1,7 @@
|
||||
# global layout
|
||||
import asyncio
|
||||
import logging.config
|
||||
import random
|
||||
from asyncio import sleep
|
||||
|
||||
import yaml
|
||||
from fasthtml.common import *
|
||||
@@ -54,6 +55,9 @@ links = [
|
||||
Link(href="./assets/daisyui-5-themes.css", rel="stylesheet", type="text/css"),
|
||||
Script(src="./assets/tailwindcss-browser@4.js"),
|
||||
|
||||
# SSE
|
||||
Script(src="https://unpkg.com/htmx-ext-sse@2.2.1/sse.js"),
|
||||
|
||||
# Old drawer layout
|
||||
Script(src="./assets/DrawerLayout.js", defer=True),
|
||||
Link(rel="stylesheet", href="./assets/DrawerLayout.css"),
|
||||
@@ -145,9 +149,11 @@ register_component("login", "components.login", "LoginApp")
|
||||
register_component("register", "components.register", "RegisterApp")
|
||||
register_component("theme_controller", "components.themecontroller", "ThemeControllerApp")
|
||||
register_component("main_layout", "components.drawerlayout", "DrawerLayoutApp")
|
||||
register_component("undo_redo", "components.undo_redo", "UndoRedoApp")
|
||||
register_component("tabs", "components.tabs", "TabsApp") # before repositories
|
||||
register_component("applications", "components.applications", "ApplicationsApp")
|
||||
register_component("repositories", "components.repositories", "RepositoriesApp")
|
||||
register_component("workflows", "components.workflows", "WorkflowsApp")
|
||||
register_component("add_stuff", "components.addstuff", None)
|
||||
register_component("form", "components.form", "FormApp")
|
||||
register_component("datagrid_new", "components.datagrid_new", "DataGridApp")
|
||||
@@ -209,6 +215,25 @@ app, rt = fast_app(
|
||||
pico=False,
|
||||
)
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Profiling middleware
|
||||
# -------------------------
|
||||
@app.middleware("http")
|
||||
async def timing_middleware(request, call_next):
|
||||
import time
|
||||
start_total = time.perf_counter()
|
||||
|
||||
# Call the next middleware or route handler
|
||||
response = await call_next(request)
|
||||
|
||||
end_total = time.perf_counter()
|
||||
elapsed = end_total - start_total
|
||||
|
||||
print(f"[PERF] Total server time: {elapsed:.4f} sec - Path: {request.url.path}")
|
||||
return response
|
||||
|
||||
|
||||
settings_manager = SettingsManager()
|
||||
|
||||
import_settings = AdminImportSettings(settings_manager, None)
|
||||
@@ -251,6 +276,42 @@ def get(session):
|
||||
DrawerLayoutOld(pages),)
|
||||
|
||||
|
||||
shutdown_event = signal_shutdown()
|
||||
|
||||
|
||||
async def number_generator():
|
||||
while True: # not shutdown_event.is_set():
|
||||
data = Article(random.randint(1, 100))
|
||||
print(data)
|
||||
yield sse_message(data)
|
||||
await sleep(1)
|
||||
|
||||
|
||||
@rt("/sse")
|
||||
def get():
|
||||
return Titled("SSE Random Number Generator",
|
||||
P("Generate pairs of random numbers, as the list grows scroll downwards."),
|
||||
Div(hx_ext="sse",
|
||||
sse_connect="/number-stream",
|
||||
hx_swap="beforeend show:bottom",
|
||||
sse_swap="message"))
|
||||
|
||||
|
||||
@rt("/number-stream")
|
||||
async def get(): return EventStream(number_generator())
|
||||
|
||||
|
||||
@rt('/toasting')
|
||||
def get(session):
|
||||
# Normally one toast is enough, this allows us to see
|
||||
# different toast types in action.
|
||||
add_toast(session, f"Toast is being cooked", "info")
|
||||
add_toast(session, f"Toast is ready", "success")
|
||||
add_toast(session, f"Toast is getting a bit crispy", "warning")
|
||||
add_toast(session, f"Toast is burning!", "error")
|
||||
return Titled("I like toast")
|
||||
|
||||
|
||||
# Error Handling
|
||||
@app.get("/{path:path}")
|
||||
def not_found(path: str, session=None):
|
||||
@@ -273,18 +334,7 @@ def not_found(path: str, session=None):
|
||||
setup_toasts(app)
|
||||
|
||||
|
||||
@rt('/toasting')
|
||||
def get(session):
|
||||
# Normally one toast is enough, this allows us to see
|
||||
# different toast types in action.
|
||||
add_toast(session, f"Toast is being cooked", "info")
|
||||
add_toast(session, f"Toast is ready", "success")
|
||||
add_toast(session, f"Toast is getting a bit crispy", "warning")
|
||||
add_toast(session, f"Toast is burning!", "error")
|
||||
return Titled("I like toast")
|
||||
|
||||
|
||||
async def main():
|
||||
def main():
|
||||
logger.info(f" Starting FastHTML server on http://localhost:{APP_PORT}")
|
||||
serve(port=APP_PORT)
|
||||
|
||||
@@ -292,9 +342,4 @@ async def main():
|
||||
if __name__ == "__main__":
|
||||
# Start your application
|
||||
logger.info("Application starting...")
|
||||
try:
|
||||
asyncio.run(main())
|
||||
except KeyboardInterrupt:
|
||||
logger.info("\nStopping application...")
|
||||
except Exception as e:
|
||||
logger.error(f"Error: {e}")
|
||||
main()
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from components.repositories.components.Repositories import Repositories
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from core.instance_manager import InstanceManager
|
||||
|
||||
|
||||
@@ -6,3 +7,8 @@ class ComponentsInstancesHelper:
|
||||
@staticmethod
|
||||
def get_repositories(session):
|
||||
return InstanceManager.get(session, Repositories.create_component_id(session))
|
||||
|
||||
@staticmethod
|
||||
def get_undo_redo(session):
|
||||
return InstanceManager.get(session, UndoRedo.create_component_id(session))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from dataclasses import is_dataclass
|
||||
|
||||
from components.datagrid_new.db_management import DataGridDbManager
|
||||
from core.Expando import Expando
|
||||
|
||||
|
||||
class DataHelper:
|
||||
@@ -16,6 +17,8 @@ class DataHelper:
|
||||
if object_type:
|
||||
if is_dataclass(object_type):
|
||||
return [object_type(**row) for row in dataframe.to_dict(orient="records")]
|
||||
elif object_type is Expando:
|
||||
return [Expando(row) for row in dataframe.to_dict(orient="records")]
|
||||
else:
|
||||
raise ValueError("object_type must be a dataclass type")
|
||||
|
||||
|
||||
14
src/utils/DbManagementHelper.py
Normal file
14
src/utils/DbManagementHelper.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
|
||||
|
||||
|
||||
class DbManagementHelper:
|
||||
@staticmethod
|
||||
def list_repositories(session):
|
||||
return ComponentsInstancesHelper.get_repositories(session).db.get_repositories()
|
||||
|
||||
@staticmethod
|
||||
def list_tables(session, repository_name):
|
||||
if not repository_name:
|
||||
return []
|
||||
repository = ComponentsInstancesHelper.get_repositories(session).db.get_repository(repository_name)
|
||||
return repository.tables
|
||||
105
src/workflow/DefaultDataPresenter.py
Normal file
105
src/workflow/DefaultDataPresenter.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from typing import Any
|
||||
|
||||
from core.Expando import Expando
|
||||
from workflow.engine import DataPresenter
|
||||
|
||||
|
||||
class DefaultDataPresenter(DataPresenter):
|
||||
"""Default data presenter that returns the input data unchanged."""
|
||||
|
||||
def __init__(self, component_id: str, mappings_definition: str):
|
||||
super().__init__(component_id)
|
||||
self._mappings_definition = mappings_definition
|
||||
self._split_definitions = [definition.strip() for definition in mappings_definition.split(",")]
|
||||
|
||||
if "*" not in mappings_definition:
|
||||
self._static_mappings = self._get_static_mappings()
|
||||
else:
|
||||
self._static_mappings = None
|
||||
|
||||
def present(self, data: Any) -> Any:
|
||||
self._validate_mappings_definition()
|
||||
|
||||
if self._static_mappings:
|
||||
return Expando(data.to_dict(self._static_mappings))
|
||||
|
||||
dynamic_mappings = self._get_dynamic_mappings(data)
|
||||
return Expando(data.to_dict(dynamic_mappings))
|
||||
|
||||
def _get_dynamic_mappings(self, data):
|
||||
|
||||
manage_conflicts = {}
|
||||
|
||||
mappings = {}
|
||||
for mapping in self._split_definitions:
|
||||
if "=" in mapping:
|
||||
key, value = [s.strip() for s in mapping.split('=', 1)]
|
||||
if key == "*":
|
||||
# all fields
|
||||
if value != "*":
|
||||
raise ValueError("Only '*' is accepted when renaming wildcard.")
|
||||
for key in data.as_dict().keys():
|
||||
if key in manage_conflicts:
|
||||
raise ValueError(f"Collision detected for field '{key}'. It is mapped from both '{manage_conflicts[key]}' and '{mapping}'.")
|
||||
manage_conflicts[key] = mapping
|
||||
mappings[key] = key
|
||||
elif key.endswith(".*"):
|
||||
# all fields in a sub-object
|
||||
if value != "*" and value != "":
|
||||
raise ValueError("Only '*' is accepted when renaming wildcard.")
|
||||
obj_path = key[:-2]
|
||||
sub_obj = data.get(obj_path)
|
||||
if isinstance(sub_obj, dict):
|
||||
for sub_field in sub_obj:
|
||||
if sub_field in manage_conflicts:
|
||||
raise ValueError(
|
||||
f"Collision detected for field '{sub_field}'. It is mapped from both '{manage_conflicts[sub_field]}' and '{mapping}'.")
|
||||
manage_conflicts[sub_field] = mapping
|
||||
mappings[f"{obj_path}.{sub_field}"] = sub_field
|
||||
else:
|
||||
raise ValueError(f"Field '{obj_path}' is not an object.")
|
||||
else:
|
||||
mappings[key.strip()] = value.strip()
|
||||
|
||||
|
||||
else:
|
||||
if mapping == "*":
|
||||
# all fields
|
||||
for key in data.as_dict().keys():
|
||||
mappings[key] = key
|
||||
elif mapping.endswith(".*"):
|
||||
# all fields in a sub-object
|
||||
obj_path = mapping[:-2]
|
||||
sub_obj = data.get(obj_path)
|
||||
if isinstance(sub_obj, dict):
|
||||
for sub_field in sub_obj:
|
||||
mappings[f"{obj_path}.{sub_field}"] = f"{obj_path}.{sub_field}"
|
||||
else:
|
||||
raise ValueError(f"Field '{obj_path}' is not an object.")
|
||||
else:
|
||||
mappings[mapping] = mapping
|
||||
|
||||
return mappings
|
||||
|
||||
def _get_static_mappings(self):
|
||||
mappings = {}
|
||||
for mapping in self._split_definitions:
|
||||
if "=" in mapping:
|
||||
key, value = [s.strip() for s in mapping.split('=', 1)]
|
||||
if not value:
|
||||
value = key.split(".")[-1]
|
||||
mappings[key] = value
|
||||
else:
|
||||
mappings[mapping] = mapping
|
||||
|
||||
return mappings
|
||||
|
||||
def _validate_mappings_definition(self):
|
||||
last_char_was_comma = False
|
||||
for i, char in enumerate(self._mappings_definition):
|
||||
if char == ',':
|
||||
if last_char_was_comma:
|
||||
raise ValueError(f"Invalid mappings definition: Error found at index {i}")
|
||||
last_char_was_comma = True
|
||||
elif not char.isspace():
|
||||
last_char_was_comma = False
|
||||
0
src/workflow/__init__.py
Normal file
0
src/workflow/__init__.py
Normal file
202
src/workflow/engine.py
Normal file
202
src/workflow/engine.py
Normal file
@@ -0,0 +1,202 @@
|
||||
import ast
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Generator
|
||||
|
||||
from components.admin.admin_db_manager import AdminDbManager
|
||||
from core.Expando import Expando
|
||||
from core.jira import Jira, JiraRequestTypes
|
||||
from core.preprocessor import PlainTextPreprocessor
|
||||
from core.utils import UnreferencedNamesVisitor
|
||||
from utils.Datahelper import DataHelper
|
||||
|
||||
|
||||
class DataProcessorError(Exception):
|
||||
def __init__(self, component_id, error):
|
||||
self.component_id = component_id
|
||||
self.error = error
|
||||
|
||||
|
||||
class DataProcessor(ABC):
|
||||
"""Base class for all data processing components."""
|
||||
|
||||
def __init__(self, component_id: str = None):
|
||||
self.component_id = component_id
|
||||
|
||||
@abstractmethod
|
||||
def process(self, data: Any) -> Generator[Any, None, None]:
|
||||
pass
|
||||
|
||||
|
||||
class DataProducer(DataProcessor):
|
||||
"""Base class for data producers that emit data using generators."""
|
||||
|
||||
@abstractmethod
|
||||
def emit(self, data: Any = None) -> Generator[Any, None, None]:
|
||||
"""Emit data items one by one using yield. Can augment input data."""
|
||||
pass
|
||||
|
||||
def process(self, data: Any) -> Generator[Any, None, None]:
|
||||
try:
|
||||
yield from self.emit(data)
|
||||
|
||||
except Exception as e:
|
||||
raise DataProcessorError(self.component_id, e)
|
||||
|
||||
|
||||
class DataFilter(DataProcessor):
|
||||
"""Base class for data filters that process data items."""
|
||||
|
||||
@abstractmethod
|
||||
def filter(self, data: Any) -> bool:
|
||||
"""Filter data items. Return True to keep the item, False to discard it."""
|
||||
pass
|
||||
|
||||
def process(self, data: Any) -> Generator[Any, None, None]:
|
||||
try:
|
||||
if self.filter(data):
|
||||
yield data
|
||||
|
||||
except Exception as e:
|
||||
raise DataProcessorError(self.component_id, e)
|
||||
|
||||
|
||||
class DataPresenter(DataProcessor):
|
||||
"""Base class for data presenters that transform data items."""
|
||||
|
||||
@abstractmethod
|
||||
def present(self, data: Any) -> Any:
|
||||
"""Present/transform data items."""
|
||||
pass
|
||||
|
||||
def process(self, data: Any) -> Generator[Any, None, None]:
|
||||
try:
|
||||
yield self.present(data)
|
||||
|
||||
except Exception as e:
|
||||
raise DataProcessorError(self.component_id, e)
|
||||
|
||||
|
||||
class TableDataProducer(DataProducer):
|
||||
"""Base class for data producers that emit data from a repository."""
|
||||
|
||||
def __init__(self, session, settings_manager, component_id, repository_name, table_name):
|
||||
super().__init__(component_id)
|
||||
self._session = session
|
||||
self.settings_manager = settings_manager
|
||||
self.repository_name = repository_name
|
||||
self.table_name = table_name
|
||||
|
||||
def emit(self, data: Any = None) -> Generator[Any, None, None]:
|
||||
yield from DataHelper.get(self._session, self.settings_manager, self.repository_name, self.table_name, Expando)
|
||||
|
||||
|
||||
class JiraDataProducer(DataProducer):
|
||||
"""Base class for data producers that emit data from Jira."""
|
||||
|
||||
logger = logging.getLogger("DataProcessor.Producer.Jira")
|
||||
|
||||
def __init__(self, session, settings_manager, component_id, request_type='search', request='', fields=None):
|
||||
super().__init__(component_id)
|
||||
self._session = session
|
||||
self.settings_manager = settings_manager
|
||||
self.request_type = request_type.value if isinstance(request_type, JiraRequestTypes) else request_type
|
||||
self.request = request
|
||||
self.fields = fields
|
||||
self.db = AdminDbManager(session, settings_manager).jira
|
||||
|
||||
def emit(self, data: Any = None) -> Generator[Any, None, None]:
|
||||
self.logger.debug(f"Emitting data from Jira: {self.request_type} {self.request} {self.fields}")
|
||||
|
||||
preprocessor = PlainTextPreprocessor()
|
||||
preprocessed_fields = preprocessor.preprocess(self.fields, {"data": data})
|
||||
self.logger.debug(f" {preprocessed_fields=}")
|
||||
|
||||
jira = Jira(self.db.user_name, self.db.api_token, fields=preprocessed_fields)
|
||||
if not hasattr(jira, self.request_type):
|
||||
raise ValueError(f"Invalid request type: {self.request_type}")
|
||||
|
||||
preprocessed_request = preprocessor.preprocess(self.request, {"data": data})
|
||||
self.logger.debug(f" {preprocessed_request=}")
|
||||
|
||||
yield from getattr(jira, self.request_type)(preprocessed_request)
|
||||
|
||||
|
||||
class DefaultDataFilter(DataFilter):
|
||||
def __init__(self, component_id: str, filter_expression: str):
|
||||
super().__init__(component_id)
|
||||
self.filter_expression = filter_expression
|
||||
self._ast_tree = ast.parse(filter_expression, "<user input>", 'eval')
|
||||
self._compiled = compile(self._ast_tree, "<string>", "eval")
|
||||
visitor = UnreferencedNamesVisitor()
|
||||
self._unreferenced_names = visitor.get_names(self._ast_tree)
|
||||
|
||||
"""Default data filter that returns True for all data items."""
|
||||
|
||||
def filter(self, data: Any) -> bool:
|
||||
my_locals = {name: data.get(name) for name in self._unreferenced_names if hasattr(data, name)}
|
||||
return eval(self._compiled, globals(), my_locals)
|
||||
|
||||
|
||||
class WorkflowEngine:
|
||||
"""Orchestrates the data processing pipeline using generators."""
|
||||
|
||||
def __init__(self):
|
||||
self.processors: list[DataProcessor] = []
|
||||
self.has_error = False
|
||||
self.global_error = None
|
||||
self.errors = {}
|
||||
|
||||
def add_processor(self, processor: DataProcessor) -> 'WorkflowEngine':
|
||||
"""Add a data processor to the pipeline."""
|
||||
self.processors.append(processor)
|
||||
return self
|
||||
|
||||
def _process_single_item(self, item: Any, processor_index: int = 0) -> Generator[Any, None, None]:
|
||||
"""Process a single item through the remaining processors."""
|
||||
if processor_index >= len(self.processors):
|
||||
yield item
|
||||
return
|
||||
|
||||
processor = self.processors[processor_index]
|
||||
|
||||
# Process the item through the current processor
|
||||
for processed_item in processor.process(item):
|
||||
# Recursively process through remaining processors
|
||||
yield from self._process_single_item(processed_item, processor_index + 1)
|
||||
|
||||
def run(self) -> Generator[Any, None, None]:
|
||||
"""
|
||||
Run the workflow pipeline and yield results one by one.
|
||||
The first processor must be a DataProducer.
|
||||
"""
|
||||
if not self.processors:
|
||||
self.has_error = False
|
||||
self.global_error = "No processors in the pipeline"
|
||||
raise ValueError(self.global_error)
|
||||
|
||||
first_processor = self.processors[0]
|
||||
|
||||
if not isinstance(first_processor, DataProducer):
|
||||
self.has_error = False
|
||||
self.global_error = "First processor must be a DataProducer"
|
||||
raise ValueError(self.global_error)
|
||||
|
||||
for item in first_processor.process(None):
|
||||
yield from self._process_single_item(item, 1)
|
||||
|
||||
def run_to_list(self) -> list[Any]:
|
||||
"""
|
||||
Run the workflow and return all results as a list.
|
||||
Use this method when you need all results at once.
|
||||
"""
|
||||
try:
|
||||
return list(self.run())
|
||||
except DataProcessorError as err:
|
||||
self.has_error = True
|
||||
self.errors[err.component_id] = err.error
|
||||
return []
|
||||
except Exception as err:
|
||||
self.has_error = True
|
||||
self.global_error = str(err)
|
||||
return []
|
||||
@@ -43,6 +43,12 @@ class Contains:
|
||||
"""
|
||||
s: str
|
||||
|
||||
@dataclasses.dataclass
|
||||
class DoesNotContain:
|
||||
"""
|
||||
To check if the attribute does not contain a specific value
|
||||
"""
|
||||
s: str
|
||||
|
||||
@dataclasses.dataclass
|
||||
class JsonViewerNode:
|
||||
@@ -420,7 +426,8 @@ def matches(actual, expected, path=""):
|
||||
assert matches(actual_child, expected_child)
|
||||
|
||||
elif isinstance(expected, NotStr):
|
||||
assert actual.s.lstrip('\n').startswith(expected.s), \
|
||||
to_compare = actual.s.lstrip('\n').lstrip()
|
||||
assert to_compare.startswith(expected.s), \
|
||||
f"{print_path(path)}NotStr are different: '{actual.s.lstrip('\n')}' != '{expected.s}'."
|
||||
|
||||
elif hasattr(actual, "tag"):
|
||||
@@ -448,6 +455,11 @@ def matches(actual, expected, path=""):
|
||||
elif isinstance(expected.attrs[expected_attr], Contains):
|
||||
assert expected.attrs[expected_attr].s in actual.attrs[expected_attr], \
|
||||
f"{print_path(path)}Attribute '{expected_attr}' does not contain '{expected.attrs[expected_attr].s}': actual='{actual.attrs[expected_attr]}', expected ='{expected.attrs[expected_attr].s}'."
|
||||
|
||||
elif isinstance(expected.attrs[expected_attr], DoesNotContain):
|
||||
assert expected.attrs[expected_attr].s not in actual.attrs[expected_attr], \
|
||||
f"{print_path(path)}Attribute '{expected_attr}' does contain '{expected.attrs[expected_attr].s}' while it must not: actual='{actual.attrs[expected_attr]}'."
|
||||
|
||||
|
||||
else:
|
||||
assert actual.attrs[expected_attr] == expected.attrs[expected_attr], \
|
||||
@@ -630,10 +642,10 @@ def extract_table_values_new(ft, header=True):
|
||||
# first, get the header
|
||||
|
||||
if header:
|
||||
header = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
|
||||
header_element = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
|
||||
header_map = {}
|
||||
res = OrderedDict()
|
||||
for row in header.children:
|
||||
for row in header_element.children:
|
||||
col_id = row.attrs["data-col"]
|
||||
title = row.attrs["data-tooltip"]
|
||||
header_map[col_id] = title
|
||||
@@ -642,9 +654,10 @@ def extract_table_values_new(ft, header=True):
|
||||
body = search_elements_by_name(ft, attrs={"class": "dt2-body"}, comparison_method='contains')[0]
|
||||
for row in body.children:
|
||||
for col in row.children:
|
||||
col_id = col.attrs["data-col"]
|
||||
cell_value = _get_cell_content_value(col)
|
||||
res[header_map[col_id]].append(cell_value)
|
||||
if hasattr(col, "attrs"):
|
||||
col_id = col.attrs["data-col"]
|
||||
cell_value = _get_cell_content_value(col)
|
||||
res[header_map[col_id]].append(cell_value)
|
||||
|
||||
return res
|
||||
|
||||
@@ -741,11 +754,22 @@ def _get_element_value(element):
|
||||
|
||||
|
||||
def icon(name: str):
|
||||
"""
|
||||
Test if an element is an icon
|
||||
:param name:
|
||||
:return:
|
||||
"""
|
||||
return NotStr(f'<svg name="{name}"')
|
||||
|
||||
|
||||
def div_icon(name: str):
|
||||
return Div(NotStr(f'<svg name="{name}"'))
|
||||
def div_icon(name: str, cls=None):
|
||||
"""
|
||||
Test if an element is an icon wrapped in a div
|
||||
:param name:
|
||||
:param cls:
|
||||
:return:
|
||||
"""
|
||||
return Div(NotStr(f'<svg name="{name}"'), cls=cls)
|
||||
|
||||
|
||||
def span_icon(name: str):
|
||||
|
||||
34
tests/my_mocks.py
Normal file
34
tests/my_mocks.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from fasthtml.components import *
|
||||
|
||||
from components.tabs.components.MyTabs import MyTabs
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tabs_manager():
|
||||
class MockTabsManager(MagicMock):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, spec=MyTabs, **kwargs)
|
||||
self.request_new_tab_id = MagicMock(side_effect=["new_tab_id", "new_tab_2", "new_tab_3", StopIteration])
|
||||
self.tabs = {}
|
||||
self.tabs_by_key = {}
|
||||
|
||||
def add_tab(self, title, content, key: str | tuple = None, tab_id: str = None, icon=None):
|
||||
self.tabs[tab_id] = (title, content)
|
||||
self.tabs_by_key[key] = (title, content)
|
||||
|
||||
def set_tab_content(self, tab_id, content, title=None, key: str | tuple = None, active=None):
|
||||
self.tabs[tab_id] = (title, content)
|
||||
self.tabs_by_key[key] = (title, content)
|
||||
|
||||
def refresh(self):
|
||||
return Div(
|
||||
Div(
|
||||
[Div(title) for title in self.tabs.keys()]
|
||||
),
|
||||
list(self.tabs.values())[-1]
|
||||
)
|
||||
|
||||
return MockTabsManager()
|
||||
66
tests/test_data_helper.py
Normal file
66
tests/test_data_helper.py
Normal file
@@ -0,0 +1,66 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
import pandas as pd
|
||||
import pytest
|
||||
|
||||
from components.datagrid_new.components.DataGrid import DataGrid
|
||||
from core.Expando import Expando
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from utils.Datahelper import DataHelper
|
||||
|
||||
TEST_GRID_ID = "testing_grid_id"
|
||||
TEST_GRID_KEY = ("RepoName", "TableName")
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def settings_manager():
|
||||
return SettingsManager(MemoryDbEngine())
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def datagrid(session, settings_manager):
|
||||
dg = DataGrid(session,
|
||||
_id=TEST_GRID_ID,
|
||||
settings_manager=settings_manager,
|
||||
key=TEST_GRID_KEY,
|
||||
boundaries={"height": 500, "width": 800})
|
||||
|
||||
df = pd.DataFrame({
|
||||
'Name': ['Alice', 'Bob'],
|
||||
'Age': [20, 25],
|
||||
'Is Student': [True, False],
|
||||
})
|
||||
|
||||
dg.init_from_dataframe(df, save_state=True)
|
||||
return dg
|
||||
|
||||
|
||||
def test_i_can_get_data_as_dataframe(session, settings_manager, datagrid):
|
||||
res = DataHelper.get(session, settings_manager, "RepoName", "TableName")
|
||||
assert isinstance(res, pd.DataFrame)
|
||||
assert res.equals(datagrid.get_dataframe())
|
||||
|
||||
|
||||
def test_i_can_get_data_as_dataclass(session, settings_manager, datagrid):
|
||||
@dataclass
|
||||
class DataclassTestClass:
|
||||
name: str
|
||||
age: int
|
||||
is_student: bool
|
||||
|
||||
res = DataHelper.get(session, settings_manager, "RepoName", "TableName", DataclassTestClass)
|
||||
assert isinstance(res, list)
|
||||
assert res == [
|
||||
DataclassTestClass("Alice", 20, True),
|
||||
DataclassTestClass("Bob", 25, False),
|
||||
]
|
||||
|
||||
|
||||
def test_i_can_get_data_as_expando(session, settings_manager, datagrid):
|
||||
res = DataHelper.get(session, settings_manager, "RepoName", "TableName", Expando)
|
||||
assert isinstance(res, list)
|
||||
assert res == [
|
||||
Expando({"name": "Alice", "age": 20, "is_student": True}),
|
||||
Expando({"name": "Bob", "age": 25, "is_student": False})
|
||||
]
|
||||
|
||||
@@ -509,3 +509,18 @@ def test_i_can_compute_footer_menu_position_when_not_enough_space(dg):
|
||||
)
|
||||
|
||||
assert matches(menu, expected)
|
||||
|
||||
|
||||
def test_the_content_of_the_cell_is_escaped(empty_dg):
|
||||
df = pd.DataFrame({
|
||||
'value': ['<div> My Content </div>'],
|
||||
'value2': ['{My Content}'],
|
||||
})
|
||||
my_dg = empty_dg.init_from_dataframe(df)
|
||||
|
||||
actual = my_dg.__ft__()
|
||||
table_content = extract_table_values_new(actual, header=True)
|
||||
|
||||
assert table_content == OrderedDict({
|
||||
'value': ['<div> My Content </div>'],
|
||||
'value2': ['{My Content}']})
|
||||
|
||||
@@ -235,3 +235,40 @@ def test_put_many_save_only_if_necessary(engine):
|
||||
|
||||
entry_content = engine.load(FAKE_USER_ID, "MyEntry")
|
||||
assert entry_content[TAG_PARENT] == [None] # Still None, nothing was save
|
||||
|
||||
|
||||
def test_i_can_retrieve_history_using_put(engine):
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(1, "a", False))
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(2, "a", False))
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(3, "a", False))
|
||||
|
||||
history = engine.history(FAKE_USER_ID, "MyEntry")
|
||||
assert len(history) == 3
|
||||
|
||||
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
|
||||
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
|
||||
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
|
||||
|
||||
assert v0["key1"] == DummyObj(3, "a", False)
|
||||
assert v1["key1"] == DummyObj(2, "a", False)
|
||||
assert v2["key1"] == DummyObj(1, "a", False)
|
||||
|
||||
assert v2[TAG_PARENT] == [None]
|
||||
|
||||
def test_i_can_retrieve_history_using_save(engine):
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(1, "a", False)})
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(2, "a", False)})
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(3, "a", False)})
|
||||
|
||||
history = engine.history(FAKE_USER_ID, "MyEntry")
|
||||
assert len(history) == 3
|
||||
|
||||
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
|
||||
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
|
||||
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
|
||||
|
||||
assert v0["key1"] == DummyObj(3, "a", False)
|
||||
assert v1["key1"] == DummyObj(2, "a", False)
|
||||
assert v2["key1"] == DummyObj(1, "a", False)
|
||||
|
||||
assert v2[TAG_PARENT] == [None]
|
||||
75
tests/test_expando.py
Normal file
75
tests/test_expando.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import pytest
|
||||
|
||||
from core.Expando import Expando
|
||||
|
||||
|
||||
def test_i_can_get_properties():
|
||||
props = {"a": 10,
|
||||
"b": {
|
||||
"c": "value",
|
||||
"d": 20
|
||||
}}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.a == 10
|
||||
assert dynamic.b.c == "value"
|
||||
|
||||
with pytest.raises(AttributeError):
|
||||
assert dynamic.unknown == "some_value"
|
||||
|
||||
|
||||
def test_i_can_get():
|
||||
props = {"a": 10,
|
||||
"b": {
|
||||
"c": "value",
|
||||
"d": 20
|
||||
}}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.get("a") == 10
|
||||
assert dynamic.get("b.c") == "value"
|
||||
assert dynamic.get("unknown") is None
|
||||
|
||||
|
||||
def test_i_can_get_from_list():
|
||||
props = {"a": [{"c": "value1", "d": 1}, {"c": "value2", "d": 2}]}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.get("a.c") == ["value1", "value2"]
|
||||
|
||||
|
||||
def test_none_is_returned_when_get_from_list_and_property_does_not_exist():
|
||||
props = {"a": [{"c": "value1", "d": 1},
|
||||
{"a": "value2", "d": 2} # 'c' does not exist in the second row
|
||||
]}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.get("a.c") == ["value1"]
|
||||
|
||||
|
||||
def test_i_can_manage_none_values():
|
||||
props = {"a": 10,
|
||||
"b": None}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.get("b.c") is None
|
||||
|
||||
|
||||
def test_i_can_manage_none_values_in_list():
|
||||
props = {"a": [{"b": {"c": "value"}},
|
||||
{"b": None}
|
||||
]}
|
||||
dynamic = Expando(props)
|
||||
|
||||
assert dynamic.get("a.b.c") == ["value"]
|
||||
|
||||
|
||||
def test_i_can_add_new_properties():
|
||||
props = {"a": 10,
|
||||
"b": 20}
|
||||
dynamic = Expando(props)
|
||||
dynamic["c"] = 30
|
||||
|
||||
assert dynamic.a == 10
|
||||
assert dynamic.b == 20
|
||||
assert dynamic.c == 30
|
||||
@@ -11,7 +11,7 @@ def sample_structure():
|
||||
"""
|
||||
A pytest fixture to provide a sample tree structure for testing.
|
||||
"""
|
||||
return Html(
|
||||
return Div(
|
||||
Header(cls="first-class"),
|
||||
Body(
|
||||
"hello world",
|
||||
@@ -26,13 +26,13 @@ def sample_structure():
|
||||
|
||||
@pytest.mark.parametrize("value, expected, expected_error", [
|
||||
(Div(), "value",
|
||||
"The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=div((),{})\nexpected=value."),
|
||||
"The types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<div></div>\nexpected=value."),
|
||||
(Div(), A(),
|
||||
"The elements are different: 'div' != 'a'."),
|
||||
(Div(Div()), Div(A()),
|
||||
"Path 'div':\n\tThe elements are different: 'div' != 'a'."),
|
||||
(Div(A(Span())), Div(A("element")),
|
||||
"Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=span((),{})\nexpected=element."),
|
||||
"Path 'div.a':\n\tThe types are different: <class 'fastcore.xml.FT'> != <class 'str'>\nactual=<span></span>\nexpected=element."),
|
||||
(Div(attr="one"), Div(attr="two"),
|
||||
"Path 'div':\n\tThe values are different for 'attr' : 'one' != 'two'."),
|
||||
(Div(A(attr="alpha")), Div(A(attr="beta")),
|
||||
@@ -57,7 +57,8 @@ def sample_structure():
|
||||
"Path 'div[class=a long attr]':\n\tAttribute 'class' does not start with 'different start': actual='a long attr', expected ='different start'."),
|
||||
(Div(cls="a long attr"), Div(cls=Contains("not included")),
|
||||
"Path 'div[class=a long attr]':\n\tAttribute 'class' does not contain 'not included': actual='a long attr', expected ='not included'."),
|
||||
|
||||
(Div(cls="a long attr"), Div(cls=DoesNotContain("long attr")),
|
||||
"Path 'div[class=a long attr]':\n\tAttribute 'class' does contain 'long attr' while it must not: actual='a long attr'."),
|
||||
])
|
||||
def test_matches_error_expected(value, expected, expected_error):
|
||||
with pytest.raises(AssertionError) as error:
|
||||
@@ -75,6 +76,7 @@ def test_matches_error_expected(value, expected, expected_error):
|
||||
(Div(), Div(Empty)),
|
||||
(Div(cls="a long attr"), Div(cls=StartsWith("a long"))),
|
||||
(Div(cls="a long attr"), Div(cls=Contains("long"))),
|
||||
(Div(cls="a long attr"), Div(cls=DoesNotContain("xxxx"))),
|
||||
])
|
||||
def test_matches_success_expected(value, expected):
|
||||
assert matches(value, expected)
|
||||
|
||||
491
tests/test_preprocessor.py
Normal file
491
tests/test_preprocessor.py
Normal file
@@ -0,0 +1,491 @@
|
||||
import pytest
|
||||
|
||||
from core.preprocessor import PlainTextPreprocessor, VariableParsingError, VariableProcessingError
|
||||
|
||||
|
||||
def test_i_can_parse_empty_text():
|
||||
"""Test that I can parse empty text input"""
|
||||
processor = PlainTextPreprocessor()
|
||||
result = processor.parse("")
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_i_can_parse_text_without_variables():
|
||||
"""Test that I can parse text without any variables"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "This is just plain text with no variables"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "text",
|
||||
"content": text,
|
||||
"start": 0,
|
||||
"end": len(text)
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_simple_variable():
|
||||
"""Test that I can parse text with only a simple variable"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "variable",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 9
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_underscores():
|
||||
"""Test that I can parse variable with underscores in name"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$my_variable_name"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "my_variable_name",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 17
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_numbers():
|
||||
"""Test that I can parse variable with numbers in name"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$var123"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "var123",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 7
|
||||
}]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_properties_with_underscores_and_numbers():
|
||||
"""Test that I can parse property names with underscores and numbers"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$var._prop123.sub_prop_456"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "var",
|
||||
"properties": ["_prop123", "sub_prop_456"],
|
||||
"start": 0,
|
||||
"end": 26
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_starting_with_underscore():
    """A variable name may begin with an underscore."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("$_private_var")

    assert elements == [
        {
            "type": "variable",
            "name": "_private_var",
            "properties": [],
            "start": 0,
            "end": 13,
        }
    ]


def test_i_can_parse_variable_with_single_property():
    """A single '.property' access is captured in the properties list."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("$variable.prop")

    assert elements == [
        {
            "type": "variable",
            "name": "variable",
            "properties": ["prop"],
            "start": 0,
            "end": 14,
        }
    ]


def test_i_can_parse_variable_with_multiple_properties():
    """A chained property access keeps the properties in order."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("$variable.prop.subprop.deep")

    assert elements == [
        {
            "type": "variable",
            "name": "variable",
            "properties": ["prop", "subprop", "deep"],
            "start": 0,
            "end": 27,
        }
    ]
def test_i_can_parse_text_with_variable_in_middle():
    """A variable embedded in text yields text/variable/text elements."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("project > $project_id and more")

    assert elements == [
        {"type": "text", "content": "project > ", "start": 0, "end": 10},
        {"type": "variable", "name": "project_id", "properties": [], "start": 10, "end": 21},
        {"type": "text", "content": " and more", "start": 21, "end": 30},
    ]


def test_i_can_parse_multiple_variables():
    """Every variable in the string is extracted, with the text between them."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("value == $variable.prop and $other_var")

    assert elements == [
        {"type": "text", "content": "value == ", "start": 0, "end": 9},
        {"type": "variable", "name": "variable", "properties": ["prop"], "start": 9, "end": 23},
        {"type": "text", "content": " and ", "start": 23, "end": 28},
        {"type": "variable", "name": "other_var", "properties": [], "start": 28, "end": 38},
    ]
def test_i_can_preserve_all_whitespace():
    """Whitespace (spaces, tabs, newlines) around variables is kept verbatim."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("  $var  \t\n  $other.prop  ")

    assert elements == [
        {"type": "text", "content": "  ", "start": 0, "end": 2},
        {"type": "variable", "name": "var", "properties": [], "start": 2, "end": 6},
        {"type": "text", "content": "  \t\n  ", "start": 6, "end": 12},
        {"type": "variable", "name": "other", "properties": ["prop"], "start": 12, "end": 23},
        {"type": "text", "content": "  ", "start": 23, "end": 25},
    ]
def test_i_can_parse_text_with_special_characters():
    """Punctuation that is not '$' or '.' is plain text."""
    sut = PlainTextPreprocessor()

    elements = sut.parse("Hello $user! @#%^&*()+={}[]|\\:;\"'<>?,./~`")

    assert elements == [
        {"type": "text", "content": "Hello ", "start": 0, "end": 6},
        {"type": "variable", "name": "user", "properties": [], "start": 6, "end": 11},
        {"type": "text", "content": "! @#%^&*()+={}[]|\\:;\"'<>?,./~`", "start": 11, "end": 41},
    ]
def test_i_can_parse_complex_expression():
    """A realistic expression parses and yields all three variable references."""
    sut = PlainTextPreprocessor()

    result = sut.parse("if ($user.profile.age > 18 && $user.status == 'active') { $action.execute(); }")

    variables = [elem for elem in result if elem["type"] == "variable"]
    assert len(variables) == 3

    # Names and property chains, in source order.
    assert [(v["name"], v["properties"]) for v in variables] == [
        ("user", ["profile", "age"]),
        ("user", ["status"]),
        ("action", ["execute"]),
    ]


def test_positions_are_accurate():
    """start/end offsets of every element match the source text."""
    sut = PlainTextPreprocessor()

    result = sut.parse("abc$var123*def")

    assert len(result) == 3
    before, variable, after = result

    assert (before["start"], before["end"], before["content"]) == (0, 3, "abc")
    assert (variable["start"], variable["end"], variable["name"]) == (3, 10, "var123")
    assert (after["start"], after["end"], after["content"]) == (10, 14, "*def")
# Error cases
def test_i_cannot_parse_dollar_alone_at_end():
    """A '$' at the very end of the text, with no name after it, is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("Hello $")

    assert exc_info.value.position == 7
    assert "Invalid syntax" in str(exc_info.value)


def test_i_cannot_parse_dollar_alone_in_middle():
    """A bare '$' followed by whitespace is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("Hello $ world")

    assert exc_info.value.position == 7
    assert "Invalid syntax" in str(exc_info.value)


def test_i_cannot_parse_dot_immediately_after_dollar():
    """'$.property' is rejected: the variable name is missing before the dot."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("$.property")

    assert exc_info.value.position == 1
    assert "Invalid syntax" in str(exc_info.value)
def test_i_cannot_parse_variable_ending_with_dot():
    """'$variable.' (trailing dot, no property name) is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("$variable.")

    assert exc_info.value.position == 9
    assert "Invalid syntax in property name." in str(exc_info.value)


@pytest.mark.parametrize("text", ["$variable. prop", "$variable .prop", "$variable . prop"])
def test_i_cannot_parse_variable_when_space_in_variable_name(text):
    """Whitespace around the property dot is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse(text)

    assert exc_info.value.position == 9
    assert "Invalid syntax in property name." in str(exc_info.value)
def test_i_cannot_parse_variable_with_empty_property():
    """'$variable..property' (empty property between dots) is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("$variable..property")

    assert exc_info.value.position == 9
    assert "Invalid syntax in property name." in str(exc_info.value)


def test_i_cannot_parse_variable_ending_with_multiple_dots():
    """'$variable...' (multiple trailing dots) is rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("$variable...")

    assert exc_info.value.position == 9
    assert "Invalid syntax in property name." in str(exc_info.value)


def test_i_cannot_parse_when_consecutive_variables():
    """Two variables with no separating text ('$var1$var2') are rejected."""
    processor = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        processor.parse("$var1$var2")

    assert exc_info.value.position == 5
    assert "Invalid syntax." in str(exc_info.value)
def test_first_error_is_reported_with_multiple_errors():
    """When several syntax errors exist, only the earliest one is reported."""
    sut = PlainTextPreprocessor()

    with pytest.raises(VariableParsingError) as exc_info:
        sut.parse("$ and $. and $var.")

    # The lone '$' at position 1 is the first error in the text.
    assert exc_info.value.position == 1
def test_i_can_preprocess_simple_variable():
    """A single variable is substituted by its value from the namespace."""
    sut = PlainTextPreprocessor()

    assert sut.preprocess("Hello $name!", {"name": "John"}) == "Hello John!"


def test_i_can_preprocess_with_properties():
    """A property chain is resolved through attribute access."""

    class User:
        def __init__(self):
            self.profile = type('Profile', (), {'age': 25})()

    sut = PlainTextPreprocessor()

    assert sut.preprocess("Age: $user.profile.age", {"user": User()}) == "Age: 25"


def test_i_can_preprocess_multiple_variables():
    """Every variable in the text is substituted."""
    sut = PlainTextPreprocessor()

    assert sut.preprocess("$first $second!", {"first": "Hello", "second": "World"}) == "Hello World!"


def test_i_can_preprocess_empty_text():
    """Empty input stays empty."""
    sut = PlainTextPreprocessor()

    assert sut.preprocess("", {}) == ""


def test_i_cannot_preprocess_undefined_variable():
    """A variable missing from the namespace raises VariableProcessingError."""
    sut = PlainTextPreprocessor()

    with pytest.raises(VariableProcessingError) as exc_info:
        sut.preprocess("$undefined_var", {})

    assert "Variable 'undefined_var' is not defined" in str(exc_info.value)


def test_i_cannot_preprocess_invalid_property():
    """Accessing a property the object does not have raises VariableProcessingError."""
    sut = PlainTextPreprocessor()

    with pytest.raises(VariableProcessingError) as exc_info:
        sut.preprocess("some text $obj.invalid_prop", {"obj": object()})

    assert "Invalid property 'invalid_prop' for variable 'obj'" in str(exc_info.value)
    assert exc_info.value.position == 14
@@ -25,7 +25,7 @@ def tabs_manager():
|
||||
self._called_methods: list[tuple] = []
|
||||
|
||||
def add_tab(self, *args, **kwargs):
|
||||
self._called_methods.append(("set_tab_content", args, kwargs))
|
||||
self._called_methods.append(("add_tab", args, kwargs))
|
||||
|
||||
table_name, content, key = args
|
||||
self.tabs.append({"table_name": table_name, "content": content, "key": key})
|
||||
|
||||
168
tests/test_undo_redo.py
Normal file
168
tests/test_undo_redo.py
Normal file
@@ -0,0 +1,168 @@
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
from fasthtml.components import Div
|
||||
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from core.dbengine import DbEngine
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from helpers import matches, div_icon, Contains, DoesNotContain
|
||||
from my_mocks import tabs_manager
|
||||
|
||||
DB_ENGINE_ROOT = "undo_redo_test_db"
|
||||
TEST_DB_ENTRY = "TestDbEntry"
|
||||
TEST_DB_KEY = "TestDbKey"
|
||||
|
||||
|
||||
class TestCommand:
    """Simple hashable value holder used as the command payload in these tests.

    Equality and hashing delegate to the wrapped value so instances can be
    compared and stored in sets/dicts.
    """

    # Prevent pytest from trying to collect this helper class as a test case
    # (its name matches the Test* collection pattern).
    __test__ = False

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return isinstance(other, TestCommand) and self.value == other.value

    def __hash__(self):
        return hash(self.value)

    def __repr__(self):
        return f"TestCommand({self.value!r})"
@pytest.fixture()
def engine(session):
    """A DbEngine backed by a throw-away directory, initialised for the session user."""
    if os.path.exists(DB_ENGINE_ROOT):
        shutil.rmtree(DB_ENGINE_ROOT)

    db_engine = DbEngine(DB_ENGINE_ROOT)
    db_engine.init(session["user_id"])

    yield db_engine

    shutil.rmtree(DB_ENGINE_ROOT)


@pytest.fixture()
def settings_manager(engine):
    """A SettingsManager over the temporary engine."""
    return SettingsManager(engine=engine)


@pytest.fixture
def undo_redo(session, tabs_manager, settings_manager):
    """An UndoRedo component bound to the test session."""
    return UndoRedo(session,
                    UndoRedo.create_component_id(session),
                    settings_manager=settings_manager,
                    tabs_manager=tabs_manager)


def init_command(session, settings_manager, undo_redo, value, on_undo=None):
    """Persist a TestCommand holding *value* and snapshot it into the undo history."""
    settings_manager.save(session, TEST_DB_ENTRY, {TEST_DB_KEY: TestCommand(value)})
    undo_redo.snapshot(UndoRedoAttrs(f"Set value to {value}", on_undo=on_undo), TEST_DB_ENTRY, TEST_DB_KEY)
def test_i_can_render(undo_redo):
    """With an empty history both buttons render disabled, with 'nothing to…' tooltips."""
    rendered = undo_redo.__ft__()

    expected = Div(
        Div(div_icon("undo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to undo."),
        Div(div_icon("redo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to redo."),
        id=undo_redo.get_id(),
    )

    assert matches(rendered, expected)


def test_i_can_render_when_undoing_and_redoing(session, settings_manager, undo_redo):
    """Button state and tooltips follow the undo/redo cursor through the history."""
    init_command(session, settings_manager, undo_redo, "1")
    init_command(session, settings_manager, undo_redo, "2")

    def assert_buttons(undo_div, redo_div):
        # Helper: the component always renders the two buttons inside one wrapper div.
        assert matches(undo_redo.__ft__(), Div(undo_div, redo_div, id=undo_redo.get_id()))

    assert_buttons(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 2'."),
        Div(div_icon("redo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to redo."),
    )

    undo_redo.undo()  # The command is now undone. We can redo it and undo the first command.
    assert_buttons(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 1'."),
        Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Redo 'Set value to 2'."),
    )

    undo_redo.undo()  # Undo again, I cannot undo anymore.
    assert_buttons(
        Div(div_icon("undo", cls=Contains("mmt-btn-disabled"))),
        Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled"))),
    )

    undo_redo.redo()  # Redo once.
    assert_buttons(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled"))),
        Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled"))),
    )

    undo_redo.redo()  # Redo a second time: nothing left to redo.
    assert_buttons(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled"))),
        Div(div_icon("redo", cls=Contains("mmt-btn-disabled"))),
    )
def test_values_are_correctly_reset(session, settings_manager, undo_redo):
    """Undo/redo restore the stored values, and hx_swap_oob="true" is added
    automatically when the returned element carries an id."""

    def on_undo():
        current = settings_manager.get(session, TEST_DB_ENTRY, TEST_DB_KEY)
        return Div(current.value, id='an_id')

    init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)
    init_command(session, settings_manager, undo_redo, "2", on_undo=on_undo)

    # undo()/redo() return a 2-tuple; the first element (presumably the component
    # itself — confirm against UndoRedo) was previously bound to a module-level
    # name `self`, which misleadingly shadowed the method-receiver convention.
    _component, res = undo_redo.undo()
    assert matches(res, Div("1", id='an_id', hx_swap_oob="true"))

    _component, res = undo_redo.redo()
    assert matches(res, Div("2", id='an_id', hx_swap_oob="true"))


def test_i_can_manage_when_the_entry_was_not_present(session, settings_manager, undo_redo):
    """Undoing past the first snapshot leaves the entry absent; on_undo observes that."""

    def on_undo():
        snapshot = settings_manager.load(session, TEST_DB_ENTRY)
        if TEST_DB_KEY in snapshot:
            return Div(snapshot[TEST_DB_KEY].value, id='an_id')
        else:
            return Div("**Not Found**", id='an_id')

    init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)

    _component, res = undo_redo.undo()
    assert matches(res, Div("**Not Found**", id='an_id', hx_swap_oob="true"))


def test_history_is_rewritten_when_pushing_a_command_after_undo(session, settings_manager, undo_redo):
    """Pushing a new command after undos discards the undone tail of the history."""
    for value in ("1", "2", "3"):
        init_command(session, settings_manager, undo_redo, value)

    undo_redo.undo()
    undo_redo.undo()
    init_command(session, settings_manager, undo_redo, "5")

    # do not forget that history always has a default command with digest = None
    assert len(undo_redo.history) == 3
@@ -5,10 +5,13 @@
|
||||
# assert column_to_number("A") == 1
|
||||
# assert column_to_number("AA") == 27
|
||||
# assert column_to_number("ZZZ") == 475254
|
||||
import ast
|
||||
|
||||
import pytest
|
||||
from fasthtml.components import Div
|
||||
|
||||
from core.utils import make_html_id, update_elements, snake_case_to_capitalized_words, merge_classes
|
||||
from core.utils import make_html_id, update_elements, snake_case_to_capitalized_words, merge_classes, \
|
||||
UnreferencedNamesVisitor
|
||||
|
||||
|
||||
@pytest.mark.parametrize("string, expected", [
|
||||
@@ -110,7 +113,7 @@ def test_i_can_merge_cls():
|
||||
kwargs = {}
|
||||
assert merge_classes("class1", kwargs) == "class1"
|
||||
assert kwargs == {}
|
||||
|
||||
|
||||
kwargs = {"foo": "bar"}
|
||||
assert merge_classes("class1", kwargs) == "class1"
|
||||
assert kwargs == {"foo": "bar"}
|
||||
@@ -127,4 +130,21 @@ def test_i_can_merge_cls():
|
||||
assert merge_classes("class1", ("class2", "class3")) == "class1 class2 class3"
|
||||
|
||||
# values are unique
|
||||
assert merge_classes("class2", "class1", ("class1", ), {"cls": "class1"}) == "class2 class1"
|
||||
assert merge_classes("class2", "class1", ("class1",), {"cls": "class1"}) == "class2 class1"
|
||||
|
||||
|
||||
@pytest.mark.parametrize("source, expected", [
    ("a,b", {"a", "b"}),
    ("isinstance(a, int)", {"a", "int"}),
    ("date.today()", set()),
    ("test()", set()),
    ("sheerka.test()", set()),
    ("for i in range(10): pass", set()),
    ("func(x=a, y=b)", {"a", "b", "x", "y"}),
])
def test_i_can_get_unreferenced_variables_from_simple_expressions(source, expected):
    """UnreferencedNamesVisitor collects the free names of a parsed expression."""
    tree = ast.parse(source)

    visitor = UnreferencedNamesVisitor()
    visitor.visit(tree)

    assert visitor.names == expected
179
tests/test_workflow_designer.py
Normal file
179
tests/test_workflow_designer.py
Normal file
@@ -0,0 +1,179 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from fastcore.basics import NotStr
|
||||
from fasthtml.components import *
|
||||
from fasthtml.xtend import Script
|
||||
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.workflows.components.WorkflowDesigner import WorkflowDesigner, COMPONENT_TYPES
|
||||
from components.workflows.constants import ProcessorTypes
|
||||
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, Connection
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from helpers import matches, Contains
|
||||
from my_mocks import tabs_manager
|
||||
|
||||
TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    """Serve a MagicMock UndoRedo through InstanceManager for the session.

    InstanceManager.get is monkey-patched for the duration of each test; the
    try/finally guarantees the original method is restored even if teardown
    is interrupted, so one failing test cannot leak the patch into the next.
    """
    undo_redo = MagicMock(spec=UndoRedo)
    original_get = InstanceManager.get

    def mock_get(sess, instance_id, *args, **kwargs):
        # Intercept only the UndoRedo component id; everything else goes
        # through the real InstanceManager.
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    InstanceManager.get = mock_get
    try:
        yield undo_redo
    finally:
        InstanceManager.get = original_get
@pytest.fixture
def designer(session, tabs_manager):
    """A WorkflowDesigner wired to an in-memory settings store."""
    return WorkflowDesigner(session=session, _id=TEST_WORKFLOW_DESIGNER_ID,
                            settings_manager=SettingsManager(engine=MemoryDbEngine()),
                            tabs_manager=tabs_manager,
                            key=TEST_WORKFLOW_DESIGNER_ID,
                            designer_settings=WorkflowsDesignerSettings("Workflow Name"),
                            boundaries={"height": 500, "width": 800}
                            )


def _make_component(comp_id, processor_type, x, y=100):
    """Build a WorkflowComponent of *processor_type* at (x, y).

    Title/description come from the COMPONENT_TYPES registry so the fixtures
    stay in sync with production metadata; this replaces three copy-pasted
    fixture bodies.
    """
    meta = COMPONENT_TYPES[processor_type]
    return WorkflowComponent(
        comp_id,
        processor_type,
        x,
        y,
        meta["title"],
        meta["description"],
        {"processor_name": processor_type[0]},
    )


@pytest.fixture
def producer_component():
    return _make_component("comp_producer", ProcessorTypes.Producer, 10)


@pytest.fixture
def filter_component():
    return _make_component("comp_filter", ProcessorTypes.Filter, 40)


@pytest.fixture
def presenter_component():
    return _make_component("comp_presenter", ProcessorTypes.Presenter, 70)


@pytest.fixture
def components(producer_component, filter_component, presenter_component):
    """The three components above, in pipeline order."""
    return [producer_component, filter_component, presenter_component]
def test_i_can_render_no_component(designer):
    """An empty designer renders the title, its three panels and the bind script."""
    rendered = designer.__ft__()

    expected = Div(
        H1("Workflow Name"),
        Div(id=f"t_{designer.get_id()}"),  # media + error message
        Div(id=f"d_{designer.get_id()}"),  # designer container
        Div(cls="wkf-splitter"),
        Div(id=f"p_{designer.get_id()}"),  # properties panel
        Script(f"bindWorkflowDesigner('{designer.get_id()}');"),
        id=designer.get_id(),
    )

    assert matches(rendered, expected)


def test_i_can_render_a_producer(designer, producer_component):
    """A component renders input point, content and output point, absolutely positioned."""
    component = producer_component
    rendered = designer._mk_component(component)

    input_point = Div(cls="wkf-connection-point wkf-input-point",
                      data_component_id=component.id,
                      data_point_type="input"
                      )
    content = Div(
        Span(COMPONENT_TYPES[component.type]["icon"]),
        H4(component.title),
        cls=Contains("wkf-component-content")
    )
    output_point = Div(cls="wkf-connection-point wkf-output-point",
                       data_component_id=component.id,
                       data_point_type="output"
                       )

    expected = Div(
        input_point,
        content,
        output_point,
        cls=Contains("wkf-workflow-component"),
        style=f"left: {component.x}px; top: {component.y}px;",
        data_component_id=component.id,
        draggable="true"
    )

    assert matches(rendered, expected)
|
||||
def test_i_can_render_a_connection(designer, components):
|
||||
designer._state.components = {c.id: c for c in components}
|
||||
connection = Connection("conn_1", "comp_producer", "comp_presenter")
|
||||
|
||||
actual = designer._mk_connection_svg(connection)
|
||||
path = "M 138 132 C 104.0 132, 104.0 132, 70 132"
|
||||
expected = f"""
|
||||
<svg class="wkf-connection-line" style="left: 0; top: 0; width: 100%; height: 100%;"
|
||||
data-from-id="{connection.from_id}" data-to-id="{connection.to_id}">
|
||||
<path d="{path}" class="wkf-connection-path-thick"/>
|
||||
<path d="{path}" class="wkf-connection-path" marker-end="url(#arrowhead)"/>
|
||||
|
||||
<defs>
|
||||
<marker id="arrowhead" markerWidth="10" markerHeight="7" refX="9" refY="3.5" orient="auto">
|
||||
<polygon points="0 0, 10 3.5, 0 7" class="wkf-connection-path-arrowhead"/>
|
||||
</marker>
|
||||
</defs>
|
||||
</svg>
|
||||
"""
|
||||
assert actual == expected
|
||||
|
||||
|
||||
def test_i_can_render_elements_with_connections(designer, components):
    """_mk_elements emits one SVG per connection followed by one div per component."""
    designer._state.components = {component.id: component for component in components}
    designer._state.connections = [
        Connection("conn_1", components[0].id, components[1].id),
        Connection("conn_2", components[1].id, components[2].id),
    ]

    rendered = designer._mk_elements()

    expected = Div(
        NotStr('<svg class="wkf-connection-line"'),  # connection 1
        NotStr('<svg class="wkf-connection-line"'),  # connection 2
        *(Div(cls=Contains("wkf-workflow-component")) for _ in components),
    )

    assert matches(rendered, expected)
164
tests/test_workflow_engine.py
Normal file
164
tests/test_workflow_engine.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from core.Expando import Expando
|
||||
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||
from workflow.engine import WorkflowEngine, DataProcessor, DataProducer, DataFilter, DataPresenter
|
||||
|
||||
|
||||
@pytest.fixture
def engine():
    """A fresh WorkflowEngine for each test."""
    return WorkflowEngine()


@pytest.fixture
def presenter_sample_data():
    """A nested task record used by the presenter tests."""
    return Expando({
        "id": 123,
        "title": "My Awesome Task",
        "creator": {
            "id": 1,
            "name": "John Doe",
            "email": "john.doe@example.com"
        },
        "assignee": {
            "id": 2,
            "name": "Jane Smith",
            "email": "jane.smith@example.com"
        }
    })


def test_empty_workflow_initialization(engine):
    """A new WorkflowEngine starts with an empty pipeline."""
    assert len(engine.processors) == 0


def test_add_processor(engine):
    """add_processor appends the processor to the pipeline."""
    processor = MagicMock(spec=DataProcessor)

    engine.add_processor(processor)

    assert len(engine.processors) == 1
    assert engine.processors[0] is processor


def test_run_empty_workflow(engine):
    """Running with no processors raises ValueError."""
    with pytest.raises(ValueError, match="No processors in the pipeline"):
        list(engine.run())


def test_run_without_producer_first(engine):
    """The pipeline must begin with a DataProducer."""
    engine.add_processor(MagicMock(spec=DataFilter))

    with pytest.raises(ValueError, match="First processor must be a DataProducer"):
        list(engine.run())
def test_run_simple_workflow(engine):
    """A lone producer streams its emitted items unchanged."""

    class SimpleProducer(DataProducer):
        def emit(self, data=None):
            yield from (1, 2, 3)

    engine.add_processor(SimpleProducer())

    assert list(engine.run()) == [1, 2, 3]


def test_process_single_item(engine):
    """_process_single_item feeds the item to the processor at the given index."""
    processor = MagicMock(spec=DataProcessor)
    processor.process.return_value = iter([42])
    engine.add_processor(processor)

    produced = list(engine._process_single_item(10, 0))  # 10 is a dummy value for the first item

    processor.process.assert_called_once_with(10)
    assert produced == [42]


def test_run_to_list(engine):
    """run_to_list materialises the whole stream as a list."""

    class SimpleProducer(DataProducer):
        def emit(self, data=None):
            yield from (1, 2)

    engine.add_processor(SimpleProducer())
    outcome = engine.run_to_list()

    assert isinstance(outcome, list)
    assert outcome == [1, 2]


def test_complex_workflow():
    """Producer -> filter -> presenter: even numbers are kept and doubled."""

    class NumberProducer(DataProducer):
        def emit(self, data=None):
            yield from range(1, 6)  # 1 to 5

    class EvenFilter(DataFilter):
        def filter(self, data):
            return data % 2 == 0  # keep even numbers

    class Doubler(DataPresenter):
        def present(self, data):
            return data * 2

    workflow = WorkflowEngine()
    for processor in (NumberProducer(), EvenFilter(), Doubler()):
        workflow.add_processor(processor)

    assert workflow.run_to_list() == [4, 8]  # even numbers (2, 4) doubled
def test_branching_workflow(engine):
|
||||
"""Test a workflow with branching outputs."""
|
||||
|
||||
class BranchingProcessor(DataProducer):
|
||||
def emit(self, data=None):
|
||||
yield data
|
||||
yield data * 10
|
||||
|
||||
class SimpleProducer(DataProducer):
|
||||
def emit(self, data=None):
|
||||
yield 1
|
||||
yield 2
|
||||
|
||||
engine.add_processor(SimpleProducer())
|
||||
engine.add_processor(BranchingProcessor())
|
||||
|
||||
result = engine.run_to_list()
|
||||
assert result == [1, 10, 2, 20]
|
||||
|
||||
|
||||
def test_presenter_i_can_use_wildcards(presenter_sample_data):
|
||||
presenter1 = DefaultDataPresenter("component_id", "id, creator.*")
|
||||
res = presenter1.present(presenter_sample_data).as_dict()
|
||||
assert res == {"id": 123, "creator.id": 1, "creator.name": "John Doe", "creator.email": "john.doe@example.com"}
|
||||
|
||||
|
||||
def test_presenter_i_can_rename_wildcard_with_specific_override(presenter_sample_data):
    """A specific mapping (creator.name=author_name) overrides the wildcard rename."""
    presenter = DefaultDataPresenter("component_id", "creator.*=*, creator.name=author_name")

    result = presenter.present(presenter_sample_data).as_dict()

    assert result == {
        "id": 1,
        "email": "john.doe@example.com",
        "author_name": "John Doe",
    }
|
||||
|
||||
|
||||
def test_presenter_i_can_manage_collisions(presenter_sample_data):
    """Two wildcards renaming into the same namespace must raise a collision error."""
    presenter = DefaultDataPresenter("component_id", "creator.*=*, assignee.*=*")

    with pytest.raises(ValueError, match="Collision detected for field"):
        presenter.present(presenter_sample_data).as_dict()
|
||||
215
tests/test_workflow_engine_default_data_presenter.py
Normal file
215
tests/test_workflow_engine_default_data_presenter.py
Normal file
@@ -0,0 +1,215 @@
|
||||
import pytest
|
||||
|
||||
from core.Expando import Expando
|
||||
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||
|
||||
|
||||
def test_i_can_present_static_mappings():
    """Listed fields are kept (optionally renamed); unlisted ones are dropped."""
    presenter = DefaultDataPresenter("comp_id", "field1 = renamed_1 , field2 ")
    payload = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(payload)

    # field1 is renamed, field2 kept as-is, field3 (unlisted) dropped.
    assert result == Expando({"renamed_1": "value1", "field2": "value2"})
|
||||
|
||||
|
||||
def test_i_can_present_implicit_renaming():
    """'path=' (empty rename target) maps the field to its leaf name."""
    presenter = DefaultDataPresenter("comp_id", "root.field1=")
    payload = Expando({"root": {"field1": "value1"}})

    result = presenter.present(payload)

    assert isinstance(result, Expando)
    assert result.as_dict() == {"field1": "value1"}
|
||||
|
||||
|
||||
def test_the_latter_mappings_take_precedence():
    """When the same field is mapped twice, the last declaration wins."""
    presenter = DefaultDataPresenter("comp_id", "field1 = renamed_1 , field1 ")
    payload = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(payload)

    # The second 'field1' entry overrides the rename; unlisted fields are dropped.
    assert result == Expando({"field1": "value1"})
|
||||
|
||||
|
||||
def test_i_can_present_static_mappings_with_sub_fields():
    """Dotted paths select nested fields; unrenamed ones keep the full path as key."""
    mappings = "root.field1 = renamed_1 , root.field2, root.sub_field.field3, root.sub_field.field4=renamed4 "
    presenter = DefaultDataPresenter("comp_id", mappings)
    payload = Expando({
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    })

    result = presenter.present(payload)

    assert isinstance(result, Expando)
    assert result.as_dict() == {
        "renamed_1": "value1",
        "root.field2": "value2",
        "root.sub_field.field3": "value3",
        "renamed4": "value4",
    }
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings():
    """A bare '*' keeps every top-level field unchanged."""
    presenter = DefaultDataPresenter("comp_id", "*")
    payload = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(payload)

    assert result == Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_for_complex_data():
    """'*' on nested data reproduces the whole structure untouched."""
    presenter = DefaultDataPresenter("comp_id", "*")
    source = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        },
        "field5": "value5",
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == source
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_with_sub_fields():
    """A nested wildcard ('root.sub_field.*') keeps full dotted paths as keys."""
    presenter = DefaultDataPresenter("comp_id", "root.sub_field.*")
    source = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == {
        "root.sub_field.field3": "value3",
        "root.sub_field.field4": "value4",
    }
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_with_sub_fields_and_renames():
    """'path.*=*' expands a nested object and renames fields to their leaf names."""
    presenter = DefaultDataPresenter("comp_id", "root.sub_field.*=*")
    source = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == {"field3": "value3", "field4": "value4"}
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_with_sub_fields_and_implicit_renames():
    """'path.*=' (empty target) behaves like '=*': leaf names become the keys."""
    presenter = DefaultDataPresenter("comp_id", "root.sub_field.*=")
    source = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == {"field3": "value3", "field4": "value4"}
|
||||
|
||||
|
||||
def test_i_can_present_dynamic_mappings_and_rename_them():
    """'*=*' behaves like a plain '*': the wildcard only descends one level."""
    presenter = DefaultDataPresenter("comp_id", "*=*")
    source = {
        "root1": {"field1": "value1", "field2": "value2"},
        "root2": {"field3": "value3", "field4": "value4"},
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == source
|
||||
|
||||
|
||||
def test_i_can_present_static_and_dynamic_mappings():
    """Static mappings and wildcards can be mixed in one declaration."""
    presenter = DefaultDataPresenter("comp_id", "root.field1 = renamed_1, root.sub_field.*")
    source = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    }

    result = presenter.present(Expando(source))

    assert isinstance(result, Expando)
    assert result.as_dict() == {
        "renamed_1": "value1",
        "root.sub_field.field3": "value3",
        "root.sub_field.field4": "value4",
    }
|
||||
|
||||
|
||||
def test_another_example_of_static_and_dynamic_mappings():
    """A wildcard plus a rename: every field is kept, field1 comes out renamed."""
    presenter = DefaultDataPresenter("comp_id", "* , field1 = renamed_1")
    payload = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(payload)

    assert result == Expando({"renamed_1": "value1", "field2": "value2", "field3": "value3"})
|
||||
|
||||
|
||||
def test_i_can_detect_conflict_when_dynamically_renaming_a_field():
    """Two wildcard renames producing the same leaf key raise with a precise message."""
    presenter = DefaultDataPresenter("comp_id", "root_1.*=*, root_2.*=*")
    source = {
        "root_1": {"field1": "value1", "field2": "value2"},
        "root_2": {"field1": "value1", "field2": "value2"},
    }

    with pytest.raises(ValueError) as exc_info:
        presenter.present(Expando(source))

    assert str(exc_info.value) == "Collision detected for field 'field1'. It is mapped from both 'root_1.*=*' and 'root_2.*=*'."
|
||||
|
||||
|
||||
def test_i_can_detect_declaration_error():
    """A malformed mappings string (empty entry from ',,') is rejected with ValueError.

    Fix: the original bound the exception via `as e` but never inspected it,
    leaving an unused binding; the bare context manager states the intent.
    """
    mappings_def = "field1 ,, field2"
    presenter = DefaultDataPresenter("comp_id", mappings_def)
    data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    # The double comma produces an empty mapping entry; presenting must fail.
    with pytest.raises(ValueError):
        presenter.present(data)
|
||||
|
||||
|
||||
def test_i_can_detect_dynamic_error_declaration():
    """A wildcard applied to a scalar field ('root.field1.*') must raise ValueError.

    Fix: the original bound the exception via `as e` but never inspected it,
    leaving an unused binding; the bare context manager states the intent.
    """
    mappings_def = "root.field1.*"  # field1 is not an object
    presenter = DefaultDataPresenter("comp_id", mappings_def)
    as_dict = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    }
    data = Expando(as_dict)

    with pytest.raises(ValueError):
        presenter.present(data)
|
||||
78
tests/test_workflow_engine_integration.py
Normal file
78
tests/test_workflow_engine_integration.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import pytest
|
||||
from unittest.mock import Mock, patch
|
||||
from core.Expando import Expando
|
||||
|
||||
from core.jira import JiraRequestTypes
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from workflow.engine import JiraDataProducer, TableDataProducer
|
||||
|
||||
JIRA_IMPORT_PATH = "workflow.engine.Jira"
|
||||
|
||||
|
||||
@pytest.fixture
def mock_jira_search_1():
    """Patch the Jira class so search() returns a single canned issue."""
    canned_issue = Expando({
        "key": "TEST-1",
        "fields": {
            "summary": "Test Issue",
            "status": {"name": "Open"},
            "assignee": {"displayName": "Test User"},
        },
    })
    with patch(JIRA_IMPORT_PATH) as jira_class:
        jira_instance = Mock()
        jira_instance.search.return_value = [canned_issue]
        jira_class.return_value = jira_instance

        # Yield the instance so tests can inspect call records on the mock.
        yield jira_instance
|
||||
|
||||
|
||||
@pytest.fixture
def mock_jira_error():
    """Patch the Jira class so search() raises, simulating an API failure."""
    with patch(JIRA_IMPORT_PATH) as jira_class:
        jira_instance = Mock()
        jira_instance.search.side_effect = Exception("Jira API Error")
        jira_class.return_value = jira_instance

        yield jira_instance
|
||||
|
||||
|
||||
def get_jira_patch(jp: JiraDataProducer):
    """Build a patch() context manager replacing Jira with a mock matching *jp*'s request type.

    Fix: the unsupported-type error message contained leftover personal debug
    text ("Hello Kodjo. ..."); it now reports the offending request type instead.

    :param jp: the producer whose request_type selects the canned behavior.
    :return: an un-entered unittest.mock.patch object for JIRA_IMPORT_PATH.
    :raises ValueError: if jp.request_type is not supported by this helper.
    """
    mock_jira_instance = Mock()
    if jp.request_type == JiraRequestTypes.Search.value:
        # Canned search result: one minimal issue with the fields the tests read.
        mock_jira_instance.search.return_value = [
            Expando({
                "key": "TEST-1",
                "fields": {
                    "summary": "Test Issue",
                    "status": {"name": "Open"},
                    "assignee": {"displayName": "Test User"},
                },
            })
        ]
    else:
        raise ValueError(f"Unsupported Jira request type: {jp.request_type!r}")

    return patch(JIRA_IMPORT_PATH, return_value=mock_jira_instance)
|
||||
|
||||
|
||||
def jira_producer(session, request_type, request, fields=None):
    """Build a JiraDataProducer wired to a fresh in-memory settings manager."""
    settings = SettingsManager(MemoryDbEngine())
    return JiraDataProducer(
        session,
        settings,
        "component_id",
        request_type=request_type,
        request=request,
        fields=fields,
    )
|
||||
|
||||
|
||||
def test_i_can_produce_jira_search(session):
    """A search producer yields the issues returned by the (mocked) Jira client."""
    producer = jira_producer(session, JiraRequestTypes.Search, "project=key1")

    with get_jira_patch(producer):
        results = list(producer.process({}))

    assert len(results) == 1
    assert results[0].key == "TEST-1"
|
||||
239
tests/test_workflow_player.py
Normal file
239
tests/test_workflow_player.py
Normal file
@@ -0,0 +1,239 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pandas as pd
|
||||
import pytest
|
||||
from pandas.testing import assert_frame_equal
|
||||
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.workflows.components.WorkflowDesigner import COMPONENT_TYPES, WorkflowDesigner
|
||||
from components.workflows.components.WorkflowPlayer import WorkflowPlayer, WorkflowsPlayerError
|
||||
from components.workflows.constants import ProcessorTypes
|
||||
from components.workflows.db_management import WorkflowComponent, Connection, ComponentState, WorkflowsDesignerSettings
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from my_mocks import tabs_manager
|
||||
from workflow.engine import DataProcessorError
|
||||
|
||||
TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
|
||||
TEST_WORKFLOW_PLAYER_ID = "workflow_player_id"
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    """Swap InstanceManager.get so lookups of the UndoRedo component hit a mock."""
    undo_redo = MagicMock(spec=UndoRedo)
    original_get = InstanceManager.get

    def patched_get(sess, instance_id, *args, **kwargs):
        # Intercept only the UndoRedo component id; delegate everything else.
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    InstanceManager.get = patched_get

    yield undo_redo

    # Teardown: put the real lookup back for subsequent tests.
    InstanceManager.get = original_get
|
||||
|
||||
@pytest.fixture
def settings_manager():
    """A settings manager backed by a throwaway in-memory engine."""
    engine = MemoryDbEngine()
    return SettingsManager(engine)
|
||||
|
||||
|
||||
@pytest.fixture
def designer(session, settings_manager, tabs_manager):
    """A WorkflowDesigner pre-loaded with a linear producer -> filter -> presenter chain."""

    def build_component(comp_id, proc_type, x_pos, processor_name):
        # Title/description come from the registered component-type metadata.
        meta = COMPONENT_TYPES[proc_type]
        return WorkflowComponent(
            comp_id,
            proc_type,
            x_pos, 100,
            meta["title"],
            meta["description"],
            {"processor_name": processor_name},
        )

    components = [
        build_component("comp_producer", ProcessorTypes.Producer, 10, "Repository"),
        build_component("comp_filter", ProcessorTypes.Filter, 40, "Default"),
        build_component("comp_presenter", ProcessorTypes.Presenter, 70, "Default"),
    ]
    connections = [
        Connection("conn_1", "comp_producer", "comp_filter"),
        Connection("conn_2", "comp_filter", "comp_presenter"),
    ]

    designer = WorkflowDesigner(
        session,
        TEST_WORKFLOW_DESIGNER_ID,
        settings_manager,
        tabs_manager,
        "Workflow Designer",
        WorkflowsDesignerSettings(workflow_name="Test Workflow"),
        {"height": 500, "width": 800},
    )

    # Inject the prepared graph directly into the designer's state.
    designer._state.components = {component.id: component for component in components}
    designer._state.connections = connections

    return designer
|
||||
|
||||
|
||||
@pytest.fixture
def player(session, settings_manager, tabs_manager, designer):
    """A WorkflowPlayer bound to the 3-component linear designer fixture."""
    return WorkflowPlayer(
        session=session,
        _id=TEST_WORKFLOW_PLAYER_ID,
        settings_manager=settings_manager,
        tabs_manager=tabs_manager,
        designer=designer,
        boundaries={"height": 500, "width": 800},
    )
|
||||
|
||||
|
||||
def test_run_successful_workflow(player, mocker):
    """Happy path: engine succeeds, every component ends SUCCESS, data is exposed."""
    # Arrange: an engine that completes cleanly and returns two rows.
    rows = [
        MagicMock(as_dict=lambda: {'col_a': 1, 'col_b': 'x'}),
        MagicMock(as_dict=lambda: {'col_a': 2, 'col_b': 'y'}),
    ]
    engine = MagicMock()
    engine.has_error = False
    engine.run_to_list.return_value = rows
    mocker.patch.object(player, '_get_engine', return_value=engine)

    # Act
    player.run()

    # Assert: no errors and all runtime states report success.
    assert not player.has_error
    assert player.global_error is None
    for state in player.runtime_states.values():
        assert state.state == ComponentState.SUCCESS

    player._get_engine.assert_called_once()
    engine.run_to_list.assert_called_once()

    # The rows must round-trip into the player's DataFrame unchanged.
    expected_df = pd.DataFrame([row.as_dict() for row in rows])
    assert_frame_equal(player.get_dataframe(), expected_df)
|
||||
|
||||
|
||||
def test_run_with_cyclical_dependency(player, mocker):
    """A cycle in the connection graph is reported before any engine is built."""
    # Arrange: close the chain into a loop and watch engine creation.
    back_edge = Connection("conn_3", "comp_presenter", "comp_producer")
    player._designer._state.connections.append(back_edge)
    engine_spy = mocker.spy(player, '_get_engine')

    # Act
    player.run()

    # Assert: the cycle is surfaced and the engine was never constructed.
    assert player.has_error
    assert "Workflow configuration error: A cycle was detected" in player.global_error
    engine_spy.assert_not_called()
|
||||
|
||||
|
||||
def test_run_with_component_initialization_failure(player, mocker):
    """An init failure marks the guilty component FAILURE; others stay NOT_RUN."""
    # Arrange: engine construction fails for a specific component.
    failing_id = "comp_filter"
    cause = ValueError("Missing a required property")
    mocker.patch.object(player, '_get_engine',
                        side_effect=WorkflowsPlayerError(failing_id, cause))

    # Act
    player.run()

    # Assert: the failure is attributed to the right component.
    assert player.has_error
    assert f"Failed to init component '{failing_id}'" in player.global_error
    failing_state = player.runtime_states[failing_id]
    assert failing_state.state == ComponentState.FAILURE
    assert str(cause) in failing_state.error_message
    assert player.runtime_states["comp_producer"].state == ComponentState.NOT_RUN
|
||||
|
||||
|
||||
def test_run_with_failure_in_middle_component(player, mocker):
    """A mid-chain runtime failure: upstream SUCCESS, failing FAILURE, downstream NOT_RUN."""
    # Arrange: an engine reporting an error at the filter component.
    failing_id = "comp_filter"
    cause = RuntimeError("Data processing failed unexpectedly")
    engine = MagicMock()
    engine.has_error = True
    engine.errors = {failing_id: DataProcessorError(failing_id, cause)}
    engine.run_to_list.return_value = []
    mocker.patch.object(player, '_get_engine', return_value=engine)

    # Act
    player.run()

    # Assert: each component's state reflects its position relative to the failure.
    assert player.has_error
    assert f"Error in component 'comp_filter':" in player.global_error
    assert player.runtime_states["comp_producer"].state == ComponentState.SUCCESS
    assert player.runtime_states[failing_id].state == ComponentState.FAILURE
    assert str(cause) in player.runtime_states[failing_id].error_message
    assert player.runtime_states["comp_presenter"].state == ComponentState.NOT_RUN
|
||||
|
||||
|
||||
def test_run_with_empty_workflow(player, mocker):
    """With no components/connections the run stops early without raising the error flag."""
    # Arrange: strip the workflow bare and watch engine creation.
    player._designer._state.components = {}
    player._designer._state.connections = []
    engine_spy = mocker.spy(player, '_get_engine')

    # Act
    player.run()

    # Assert: not flagged as an error, but the player explains why nothing ran.
    assert not player.has_error
    assert player.global_error == 'No connections defined.'
    engine_spy.assert_not_called()
|
||||
|
||||
|
||||
def test_run_with_global_engine_error(player, mocker):
    """A global engine error (no per-component entry) surfaces as the player's error."""
    # Arrange: engine fails globally without blaming any component.
    engine = MagicMock()
    engine.has_error = True
    engine.errors = {}  # nothing component-specific
    engine.global_error = "A simulated global engine failure"
    engine.run_to_list.return_value = []
    mocker.patch.object(player, '_get_engine', return_value=engine)

    # Act
    player.run()

    # Assert: the engine's global error is relayed verbatim.
    assert player.has_error
    assert player.global_error == engine.global_error
|
||||
150
tests/test_workflows.py
Normal file
150
tests/test_workflows.py
Normal file
@@ -0,0 +1,150 @@
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from fasthtml.components import *
|
||||
|
||||
from components.form.components.MyForm import FormField, MyForm
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.workflows.components.Workflows import Workflows
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from helpers import matches, div_icon, search_elements_by_name, Contains
|
||||
from my_mocks import tabs_manager
|
||||
|
||||
TEST_WORKFLOWS_ID = "testing_repositories_id"
|
||||
|
||||
boundaries = {"height": 500, "width": 800}
|
||||
|
||||
@pytest.fixture
def workflows(session, tabs_manager):
    """A Workflows component backed by an in-memory settings engine."""
    settings = SettingsManager(engine=MemoryDbEngine())
    return Workflows(session=session,
                     _id=TEST_WORKFLOWS_ID,
                     settings_manager=settings,
                     tabs_manager=tabs_manager)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    """Swap InstanceManager.get so lookups of the UndoRedo component hit a mock."""
    undo_redo = MagicMock(spec=UndoRedo)
    original_get = InstanceManager.get

    def patched_get(sess, instance_id, *args, **kwargs):
        # Intercept only the UndoRedo component id; delegate everything else.
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    InstanceManager.get = patched_get

    yield undo_redo

    # Teardown: put the real lookup back for subsequent tests.
    InstanceManager.get = original_get
|
||||
|
||||
|
||||
def test_render_no_workflow(workflows):
    """Rendering with no workflows shows the header row and an empty list container."""
    actual = workflows.__ft__()

    expected = Div(
        Div(cls="divider"),
        Div(
            Div("Workflows"),
            div_icon("add"),  # the 'new workflow' button
            cls="flex"
        ),
        Div(id=f"w_{workflows.get_id()}", ),  # empty workflow list
        id=workflows.get_id(),
    )

    assert matches(actual, expected)
|
||||
|
||||
|
||||
def test_render_with_workflows_defined(workflows):
    """Each stored workflow appears as an entry in the list container."""
    for name in ("workflow 1", "workflow 2"):
        workflows.db.add_workflow(name)

    actual = workflows.__ft__()

    expected = Div(
        Div(cls="divider"),
        Div(),  # header row (title + add icon)
        Div(
            Div("workflow 1"),
            Div("workflow 2"),
            id=f"w_{workflows.get_id()}"
        ),  # list of workflows
        id=workflows.get_id(),
    )

    assert matches(actual, expected)
|
||||
|
||||
|
||||
def test_i_can_see_selected_workflow(workflows):
    """The selected workflow's entry carries the 'mmt-selected' class."""
    workflows.db.add_workflow("workflow 1")
    workflows.db.add_workflow("workflow 2")
    workflows.db.select_workflow("workflow 2")

    rendered = workflows.__ft__()
    list_id = f"w_{workflows.get_id()}"
    workflow_list = search_elements_by_name(rendered, "div", attrs={"id": list_id})[0]

    expected = Div(
        Div("workflow 1"),
        Div(Div("workflow 2"), cls=Contains("mmt-selected")),
        id=list_id
    )

    assert matches(workflow_list, expected)
|
||||
|
||||
|
||||
def test_i_can_request_for_a_new_workflow(workflows, tabs_manager):
    """Requesting a new workflow opens an 'Add Workflow' tab holding a MyForm."""
    res = workflows.request_new_workflow()

    tabs_manager.request_new_tab_id.assert_called_once()
    assert "new_tab_id" in res.tabs

    title, content = res.tabs["new_tab_id"][0], res.tabs["new_tab_id"][1]
    assert title == "Add Workflow"
    assert isinstance(content, MyForm)
    assert content.title == "Add Workflow"
    assert content.fields == [FormField("name", 'Workflow Name', 'input')]
|
||||
|
||||
|
||||
def test_i_can_add_a_new_workflow(workflows, tabs_manager):
    """Adding a workflow re-renders the list and persists the new entry."""
    res = workflows.request_new_workflow()
    tab_id = next(iter(res.tabs))

    actual = workflows.add_new_workflow(tab_id, "Not relevant here", "New Workflow", boundaries)

    expected = (
        Div(
            Div("New Workflow"),
            id=f"w_{workflows.get_id()}"
        ),  # refreshed workflow list
        Div(),  # designer embedded in the new tab
    )

    assert matches(actual, expected)

    # The workflow must also be persisted.
    assert workflows.db.exists_workflow("New Workflow")
|
||||
|
||||
|
||||
def test_i_can_select_a_workflow(workflows):
    """Showing a workflow highlights it in the rendered list."""
    workflows.add_new_workflow("tab_id_1", "Not relevant", "workflow 1", boundaries)
    workflows.add_new_workflow("tab_id_2", "Not relevant", "workflow 2", boundaries)
    workflows.add_new_workflow("tab_id_3", "Not relevant", "workflow 3", boundaries)

    actual = workflows.show_workflow("workflow 2", boundaries)

    expected = (
        Div(
            Div("workflow 1"),
            Div(Div("workflow 2"), cls=Contains("mmt-selected")),
            Div("workflow 3"),
            id=f"w_{workflows.get_id()}"
        ),  # list of workflows
        Div(),  # designer embedded in the tab
    )

    assert matches(actual, expected)
|
||||
152
tests/tests_workflows_db_manager.py
Normal file
152
tests/tests_workflows_db_manager.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from components.workflows.constants import WORKFLOWS_DB_ENTRY
|
||||
from components.workflows.db_management import WorkflowsDbManager, WorkflowsSettings
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
|
||||
USER_EMAIL = "test@mail.com"
|
||||
USER_ID = "test_user"
|
||||
|
||||
|
||||
@pytest.fixture
def session():
    """Minimal session dict identifying the test user."""
    return dict(user_id=USER_ID, user_email=USER_EMAIL)
|
||||
|
||||
|
||||
@pytest.fixture
def settings_manager():
    """A settings manager backed by a throwaway in-memory engine."""
    memory_engine = MemoryDbEngine()
    return SettingsManager(engine=memory_engine)
|
||||
|
||||
|
||||
@pytest.fixture
def workflows_db_manager(session, settings_manager):
    """The WorkflowsDbManager under test, wired to the session/settings fixtures."""
    return WorkflowsDbManager(session=session,
                              settings_manager=settings_manager)
|
||||
|
||||
|
||||
def test_add_workflow(workflows_db_manager):
    """Adding a workflow returns True and makes it visible in get_workflows()."""
    assert workflows_db_manager.add_workflow("workflow1") is True

    stored = workflows_db_manager.get_workflows()
    assert "workflow1" in stored
    assert len(stored) == 1
|
||||
|
||||
|
||||
def test_add_workflow_empty_name(workflows_db_manager):
    """An empty workflow name is rejected up front."""
    with pytest.raises(ValueError, match="Workflow name cannot be empty."):
        workflows_db_manager.add_workflow("")
|
||||
|
||||
|
||||
def test_add_workflow_duplicate(workflows_db_manager):
    """Adding the same workflow name twice raises a descriptive ValueError."""
    workflows_db_manager.add_workflow("workflow1")

    with pytest.raises(ValueError, match="Workflow 'workflow1' already exists."):
        workflows_db_manager.add_workflow("workflow1")
|
||||
|
||||
|
||||
def test_get_workflow(workflows_db_manager):
    """A stored workflow can be fetched back by name."""
    workflows_db_manager.add_workflow("workflow1")

    assert workflows_db_manager.get_workflow("workflow1") == "workflow1"
|
||||
|
||||
|
||||
def test_get_workflow_empty_name(workflows_db_manager):
    """Fetching with an empty name is rejected up front."""
    with pytest.raises(ValueError, match="Workflow name cannot be empty."):
        workflows_db_manager.get_workflow("")
|
||||
|
||||
|
||||
def test_get_workflow_nonexistent(workflows_db_manager):
    """Fetching an unknown workflow raises a descriptive ValueError."""
    with pytest.raises(ValueError, match="Workflow 'nonexistent' does not exist."):
        workflows_db_manager.get_workflow("nonexistent")
|
||||
|
||||
|
||||
def test_remove_workflow(workflows_db_manager):
    """Removing an existing workflow returns True and empties the store."""
    workflows_db_manager.add_workflow("workflow1")

    assert workflows_db_manager.remove_workflow("workflow1") is True

    # Nothing should remain after the removal.
    assert len(workflows_db_manager.get_workflows()) == 0
|
||||
|
||||
|
||||
def test_remove_workflow_empty_name(workflows_db_manager):
    """Removing with an empty name is rejected up front."""
    with pytest.raises(ValueError, match="Workflow name cannot be empty."):
        workflows_db_manager.remove_workflow("")
|
||||
|
||||
|
||||
def test_remove_workflow_nonexistent(workflows_db_manager):
    """Removing an unknown workflow raises a descriptive ValueError."""
    # NOTE(review): the message starts with lowercase 'workflow' here while
    # get_workflow uses 'Workflow' — presumably mirrors the implementation; verify.
    with pytest.raises(ValueError, match="workflow 'nonexistent' does not exist."):
        workflows_db_manager.remove_workflow("nonexistent")
|
||||
|
||||
|
||||
def test_exists_workflow(workflows_db_manager):
    """exists_workflow() reports True for stored names and False otherwise."""
    workflows_db_manager.add_workflow("workflow1")

    assert workflows_db_manager.exists_workflow("workflow1") is True
    assert workflows_db_manager.exists_workflow("nonexistent") is False
|
||||
|
||||
|
||||
def test_exists_workflow_empty_name(workflows_db_manager):
    """Existence checks with an empty name are rejected up front."""
    # NOTE(review): lowercase 'workflow' in the message differs from the add/get
    # variants — presumably mirrors the implementation; verify.
    with pytest.raises(ValueError, match="workflow name cannot be empty."):
        workflows_db_manager.exists_workflow("")
|
||||
|
||||
|
||||
def test_get_workflows(workflows_db_manager):
    """get_workflows() reflects exactly what has been added."""
    # Empty store to begin with.
    assert len(workflows_db_manager.get_workflows()) == 0

    for name in ("workflow1", "workflow2"):
        workflows_db_manager.add_workflow(name)

    stored = workflows_db_manager.get_workflows()
    assert "workflow1" in stored
    assert "workflow2" in stored
    assert len(stored) == 2
|
||||
|
||||
|
||||
def test_select_workflow(workflows_db_manager):
    """Selecting a workflow records it as the current selection."""
    workflows_db_manager.add_workflow("workflow1")
    workflows_db_manager.select_workflow("workflow1")

    assert workflows_db_manager.get_selected_workflow() == "workflow1"
|
||||
|
||||
|
||||
def test_get_selected_workflow_none(workflows_db_manager):
    """Before any selection is made, the manager reports None."""
    assert workflows_db_manager.get_selected_workflow() is None
|
||||
|
||||
|
||||
def test_get_settings_default(workflows_db_manager, session, settings_manager):
    """_get_settings delegates to SettingsManager.load with the expected default."""
    with patch.object(settings_manager, 'load', return_value=WorkflowsSettings()) as load_mock:
        settings = workflows_db_manager._get_settings()

        load_mock.assert_called_once_with(session, WORKFLOWS_DB_ENTRY, default=WorkflowsSettings())
        assert isinstance(settings, WorkflowsSettings)
        assert settings.workflows == []
        assert settings.selected_workflow is None
|
||||
Reference in New Issue
Block a user