Compare commits: 72f5f30da6...AddingWorf (27 commits)

| Commit SHA |
|---|
| 3bd503d4d2 |
| 292a477298 |
| eb8d6a99a2 |
| 765c715d63 |
| e90e7b01dd |
| fe5668fbed |
| 63058ef4a9 |
| 957a92f903 |
| 33970c9c97 |
| 8eca1da3ca |
| 97a5989390 |
| e73709c859 |
| f0d98d23ff |
| 64e7c44a7d |
| 3a1870a160 |
| c2fcfbb2ab |
| e74639c042 |
| badc2e28b0 |
| 4ac3eb2dfa |
| 2bd998fe69 |
| c694f42c07 |
| 6949bb2814 |
| 14f079d5f9 |
| 3ca23449e4 |
| a6f765c624 |
| 43e7dd5f00 |
| 37c91d0d5d |

.gitignore (vendored): 6 changed lines
@@ -11,6 +11,10 @@ tests/TestDBEngineRoot
.sesskey
tools.db
.mytools_db
.idea/MyManagingTools.iml
.idea/misc.xml
.idea_bak
**/*.prof

# Created by .ignore support plugin (hsz.mobi)
### Python template
@@ -196,4 +200,4 @@ fabric.properties
.idea/caches/build_file_checksums.ser

# idea folder, uncomment if you don't need it
# .idea
# .idea

.idea/.gitignore (generated, vendored): 3 changed lines
@@ -1,3 +0,0 @@
# Default ignored files
/shelf/
/workspace.xml

.idea/MyManagingTools.iml (generated): 11 changed lines
@@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
</content>
<orderEntry type="jdk" jdkName="Python 3.12 (MyManagingTools)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

.idea/codeStyles/codeStyleConfig.xml (generated): 5 changed lines
@@ -1,5 +0,0 @@
<component name="ProjectCodeStyleConfiguration">
<state>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
</state>
</component>

.idea/inspectionProfiles/Project_Default.xml (generated, new file): 6 lines
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyInitNewSignatureInspection" enabled="false" level="WARNING" enabled_by_default="false" />
</profile>
</component>

.idea/misc.xml (generated): 7 changed lines
@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Black">
<option name="sdkName" value="Python 3.12 (MyManagingTools)" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.12 (MyManagingTools)" project-jdk-type="Python SDK" />
</project>

.idea/vcs.xml (generated): 2 changed lines
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
<mapping directory="" vcs="Git" />
</component>
</project>

Makefile: 4 changed lines
@@ -18,7 +18,9 @@ clean:
rm -rf Untitled*.ipynb
rm -rf .ipynb_checkpoints
rm -rf src/tools.db
rm -rf src/*.out
rm -rf src/*.prof
find . -name '.sesskey' -exec rm -rf {} +
find . -name '.pytest_cache' -exec rm -rf {} +
find . -name '__pycache__' -exec rm -rf {} +
find . -name 'debug.txt' -exec rm -rf {}
find . -name 'debug.txt' -exec rm -rf {}

@@ -14,7 +14,7 @@ python main.py
```shell
docker-compose up -d
```
The application will be accessible on port 8000 (or whatever port you configured).
The application will be accessible on port 8001 (if the docker compose file was not changed !).

2. **Initialize the Mistral model** (first run):
```shell
@@ -34,4 +34,11 @@ docker-compose down
1. **Rebuild**:
```shell
docker-compose build
```

# Profiling
```shell
cd src
python -m cProfile -o profile.out main.py
snakeviz profile.out # 'pip install snakeviz' if snakeviz is not installed
```

@@ -2,6 +2,7 @@ annotated-types==0.7.0
anyio==4.6.0
apsw==3.50.2.0
apswutils==0.1.0
Arpeggio==2.0.2
beautifulsoup4==4.12.3
certifi==2024.8.30
charset-normalizer==3.4.2

@@ -25,11 +25,19 @@ function bindTooltipsWithDelegation() {

// Add a single mouseenter and mouseleave listener to the parent element
element.addEventListener("mouseenter", (event) => {
//console.debug("Entering element", event.target)

const cell = event.target.closest("[data-tooltip]");
if (!cell) return;
if (!cell) {
// console.debug(" No 'data-tooltip' attribute found. Stopping.");
return;
}

const no_tooltip = element.hasAttribute("mmt-no-tooltip");
if (no_tooltip) return;
if (no_tooltip) {
// console.debug(" Attribute 'mmt-no-tooltip' found. Cancelling.");
return;
}

const content = cell.querySelector(".truncate") || cell;
const isOverflowing = content.scrollWidth > content.clientWidth;

@@ -1,4 +1,4 @@
from core.utils import get_user_id
from core.utils import get_user_id, get_unique_id


class BaseComponent:
@@ -51,3 +51,12 @@ class BaseComponentSingleton(BaseComponent):
@classmethod
def create_component_id(cls, session):
return f"{cls.COMPONENT_INSTANCE_ID}{session['user_id']}"


class BaseComponentMultipleInstance(BaseComponent):
COMPONENT_INSTANCE_ID = None

@classmethod
def create_component_id(cls, session):
component_id = cls.COMPONENT_INSTANCE_ID or cls.__name__
return get_unique_id(f"{component_id}{session['user_id']}")

@@ -136,3 +136,10 @@ def post(session, _id: str, state: str, args: str = None):
logger.debug(f"Entering on_state_changed with args {_id=}, {state=}, {args=}")
instance = InstanceManager.get(session, _id)
return instance.manage_state_changed(state, args)


@rt(Routes.GetPage)
def get(session, _id: str, page_index: int):
logger.debug(f"Entering {Routes.GetPage} with args {_id=}, {page_index=}")
instance = InstanceManager.get(session, _id)
return instance.mk_body_content_page(page_index)

@@ -1,6 +1,6 @@
function bindDatagrid(datagridId, allowColumnsReordering) {
bindScrollbars(datagridId);
makeResizable(datagridId)
manageScrollbars(datagridId, true);
makeResizable(datagridId);
}

function bindScrollbars(datagridId) {
@@ -21,7 +21,7 @@ function bindScrollbars(datagridId) {
const table = datagrid.querySelector(".dt2-table");

if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
console.error("Essential scrollbar or content elements are missing in the datagrid.");
console.error("Essential scrollbars or content elements are missing in the datagrid.");
return;
}
@@ -176,6 +176,224 @@ function bindScrollbars(datagridId) {
|
||||
});
|
||||
}
|
||||
|
||||
function manageScrollbars(datagridId, binding) {
|
||||
console.debug("manageScrollbars on element " + datagridId + " with binding=" + binding);
|
||||
|
||||
const datagrid = document.getElementById(datagridId);
|
||||
|
||||
if (!datagrid) {
|
||||
console.error(`Datagrid with id "${datagridId}" not found.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const verticalScrollbar = datagrid.querySelector(".dt2-scrollbars-vertical");
|
||||
const verticalWrapper = datagrid.querySelector(".dt2-scrollbars-vertical-wrapper");
|
||||
const horizontalScrollbar = datagrid.querySelector(".dt2-scrollbars-horizontal");
|
||||
const horizontalWrapper = datagrid.querySelector(".dt2-scrollbars-horizontal-wrapper");
|
||||
const body = datagrid.querySelector(".dt2-body");
|
||||
const table = datagrid.querySelector(".dt2-table");
|
||||
|
||||
if (!verticalScrollbar || !verticalWrapper || !horizontalScrollbar || !horizontalWrapper || !body || !table) {
|
||||
console.error("Essential scrollbars or content elements are missing in the datagrid.");
|
||||
return;
|
||||
}
|
||||
|
||||
const computeScrollbarVisibility = () => {
|
||||
// Determine if the content is clipped
|
||||
const isVerticalRequired = body.scrollHeight > body.clientHeight;
|
||||
const isHorizontalRequired = table.scrollWidth > table.clientWidth;
|
||||
|
||||
// Show or hide the scrollbar wrappers
|
||||
requestAnimationFrame(() => {
|
||||
verticalWrapper.style.display = isVerticalRequired ? "block" : "none";
|
||||
horizontalWrapper.style.display = isHorizontalRequired ? "block" : "none";
|
||||
});
|
||||
};
|
||||
|
||||
const computeScrollbarSize = () => {
|
||||
// Vertical scrollbar height
|
||||
const visibleHeight = body.clientHeight;
|
||||
const totalHeight = body.scrollHeight;
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
|
||||
let scrollbarHeight = 0;
|
||||
if (totalHeight > 0) {
|
||||
scrollbarHeight = (visibleHeight / totalHeight) * wrapperHeight;
|
||||
}
|
||||
|
||||
// Horizontal scrollbar width
|
||||
const visibleWidth = table.clientWidth;
|
||||
const totalWidth = table.scrollWidth;
|
||||
const wrapperWidth = horizontalWrapper.offsetWidth;
|
||||
|
||||
let scrollbarWidth = 0;
|
||||
if (totalWidth > 0) {
|
||||
scrollbarWidth = (visibleWidth / totalWidth) * wrapperWidth;
|
||||
}
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
verticalScrollbar.style.height = `${scrollbarHeight}px`;
|
||||
horizontalScrollbar.style.width = `${scrollbarWidth}px`;
|
||||
});
|
||||
};
|
||||
|
||||
const updateVerticalScrollbarForMouseWheel = () => {
|
||||
const maxScrollTop = body.scrollHeight - body.clientHeight;
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
|
||||
if (maxScrollTop > 0) {
|
||||
const scrollRatio = wrapperHeight / body.scrollHeight;
|
||||
verticalScrollbar.style.top = `${body.scrollTop * scrollRatio}px`;
|
||||
}
|
||||
};
|
||||
|
||||
if (binding) {
|
||||
// Clean up existing managers if they exist
|
||||
if (datagrid._managers) {
|
||||
// Remove drag events
|
||||
if (datagrid._managers.dragManager) {
|
||||
verticalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.verticalMouseDown);
|
||||
horizontalScrollbar.removeEventListener("mousedown", datagrid._managers.dragManager.horizontalMouseDown);
|
||||
document.removeEventListener("mousemove", datagrid._managers.dragManager.mouseMove);
|
||||
document.removeEventListener("mouseup", datagrid._managers.dragManager.mouseUp);
|
||||
}
|
||||
|
||||
// Remove wheel events
|
||||
if (datagrid._managers.wheelManager) {
|
||||
body.removeEventListener("wheel", datagrid._managers.wheelManager.handleWheelScrolling);
|
||||
}
|
||||
|
||||
// Remove resize events
|
||||
if (datagrid._managers.resizeManager) {
|
||||
window.removeEventListener("resize", datagrid._managers.resizeManager.handleResize);
|
||||
}
|
||||
}
|
||||
|
||||
// Create managers
|
||||
const dragManager = {
|
||||
isDragging: false,
|
||||
startY: 0,
|
||||
startX: 0,
|
||||
|
||||
updateVerticalScrollbar: (deltaX, deltaY) => {
|
||||
const wrapperHeight = verticalWrapper.offsetHeight;
|
||||
const scrollbarHeight = verticalScrollbar.offsetHeight;
|
||||
const maxScrollTop = body.scrollHeight - body.clientHeight;
|
||||
const scrollRatio = maxScrollTop / (wrapperHeight - scrollbarHeight);
|
||||
|
||||
let newTop = parseFloat(verticalScrollbar.style.top || "0") + deltaY;
|
||||
newTop = Math.max(0, Math.min(newTop, wrapperHeight - scrollbarHeight));
|
||||
|
||||
verticalScrollbar.style.top = `${newTop}px`;
|
||||
body.scrollTop = newTop * scrollRatio;
|
||||
},
|
||||
|
||||
updateHorizontalScrollbar: (deltaX, deltaY) => {
|
||||
const wrapperWidth = horizontalWrapper.offsetWidth;
|
||||
const scrollbarWidth = horizontalScrollbar.offsetWidth;
|
||||
const maxScrollLeft = table.scrollWidth - table.clientWidth;
|
||||
const scrollRatio = maxScrollLeft / (wrapperWidth - scrollbarWidth);
|
||||
|
||||
let newLeft = parseFloat(horizontalScrollbar.style.left || "0") + deltaX;
|
||||
newLeft = Math.max(0, Math.min(newLeft, wrapperWidth - scrollbarWidth));
|
||||
|
||||
horizontalScrollbar.style.left = `${newLeft}px`;
|
||||
table.scrollLeft = newLeft * scrollRatio;
|
||||
},
|
||||
|
||||
verticalMouseDown: (e) => {
|
||||
disableTooltip();
|
||||
dragManager.isDragging = true;
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
document.body.style.userSelect = "none";
|
||||
verticalScrollbar.classList.add("dt2-dragging");
|
||||
},
|
||||
|
||||
horizontalMouseDown: (e) => {
|
||||
disableTooltip();
|
||||
dragManager.isDragging = true;
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
document.body.style.userSelect = "none";
|
||||
horizontalScrollbar.classList.add("dt2-dragging");
|
||||
},
|
||||
|
||||
mouseMove: (e) => {
|
||||
if (dragManager.isDragging) {
|
||||
const deltaY = e.clientY - dragManager.startY;
|
||||
const deltaX = e.clientX - dragManager.startX;
|
||||
|
||||
// Determine which scrollbar is being dragged
|
||||
if (verticalScrollbar.classList.contains("dt2-dragging")) {
|
||||
dragManager.updateVerticalScrollbar(deltaX, deltaY);
|
||||
} else if (horizontalScrollbar.classList.contains("dt2-dragging")) {
|
||||
dragManager.updateHorizontalScrollbar(deltaX, deltaY);
|
||||
}
|
||||
|
||||
// Reset start points for next update
|
||||
dragManager.startY = e.clientY;
|
||||
dragManager.startX = e.clientX;
|
||||
}
|
||||
},
|
||||
|
||||
mouseUp: () => {
|
||||
dragManager.isDragging = false;
|
||||
document.body.style.userSelect = "";
|
||||
verticalScrollbar.classList.remove("dt2-dragging");
|
||||
horizontalScrollbar.classList.remove("dt2-dragging");
|
||||
enableTooltip();
|
||||
}
|
||||
};
|
||||
|
||||
const wheelManager = {
|
||||
handleWheelScrolling: (event) => {
|
||||
const deltaX = event.deltaX;
|
||||
const deltaY = event.deltaY;
|
||||
|
||||
// Scroll the body and table content
|
||||
body.scrollTop += deltaY; // Vertical scrolling
|
||||
table.scrollLeft += deltaX; // Horizontal scrolling
|
||||
|
||||
// Update the vertical scrollbar position
|
||||
updateVerticalScrollbarForMouseWheel();
|
||||
|
||||
// Prevent default behavior to fully manage the scroll
|
||||
event.preventDefault();
|
||||
}
|
||||
};
|
||||
|
||||
const resizeManager = {
|
||||
handleResize: () => {
|
||||
computeScrollbarVisibility();
|
||||
computeScrollbarSize();
|
||||
updateVerticalScrollbarForMouseWheel();
|
||||
}
|
||||
};
|
||||
|
||||
// Store managers on datagrid for cleanup
|
||||
datagrid._managers = {
|
||||
dragManager,
|
||||
wheelManager,
|
||||
resizeManager
|
||||
};
|
||||
|
||||
// Bind events
|
||||
verticalScrollbar.addEventListener("mousedown", dragManager.verticalMouseDown);
|
||||
horizontalScrollbar.addEventListener("mousedown", dragManager.horizontalMouseDown);
|
||||
document.addEventListener("mousemove", dragManager.mouseMove);
|
||||
document.addEventListener("mouseup", dragManager.mouseUp);
|
||||
|
||||
body.addEventListener("wheel", wheelManager.handleWheelScrolling, {passive: false});
|
||||
|
||||
window.addEventListener("resize", resizeManager.handleResize);
|
||||
}
|
||||
|
||||
// Always execute computations
|
||||
computeScrollbarVisibility();
|
||||
computeScrollbarSize();
|
||||
}
|
||||
|
||||
function makeResizable(datagridId) {
|
||||
console.debug("makeResizable on element " + datagridId);
|
||||
|
||||
@@ -494,4 +712,5 @@ function onAfterSettle(datagridId, event) {
|
||||
if (response.includes("hx-on::before-settle")) {
|
||||
bindDatagrid(datagridId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import copy
|
||||
import html
|
||||
import logging
|
||||
from io import BytesIO
|
||||
from typing import Literal, Any
|
||||
@@ -20,9 +21,10 @@ from components.datagrid_new.db_management import DataGridDbManager
|
||||
from components.datagrid_new.settings import DataGridRowState, DataGridColumnState, \
|
||||
DataGridFooterConf, DataGridState, DataGridSettings, DatagridView
|
||||
from components_helpers import mk_icon, mk_ellipsis
|
||||
from core.fasthtml_helper import MyDiv, mk_my_ellipsis, MySpan, mk_my_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.settings_management import SettingsManager
|
||||
from core.utils import get_unique_id, make_safe_id
|
||||
from core.utils import get_unique_id, make_safe_id, timed
|
||||
|
||||
logger = logging.getLogger("DataGrid")
|
||||
|
||||
@@ -59,6 +61,8 @@ class DataGrid(BaseComponent):
|
||||
self._state: DataGridState = self._db.load_state()
|
||||
self._settings: DataGridSettings = grid_settings or self._db.load_settings()
|
||||
self._df: DataFrame | None = self._db.load_dataframe()
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._total_rows = len(self._df) if self._df is not None else 0
|
||||
|
||||
# update boundaries if possible
|
||||
self.set_boundaries(boundaries)
|
||||
@@ -118,14 +122,23 @@ class DataGrid(BaseComponent):
|
||||
else:
|
||||
return ColumnType.Text # Default to Text if no match
|
||||
|
||||
def _init_columns(_df):
|
||||
columns = [DataGridColumnState(make_safe_id(col_id),
|
||||
col_index,
|
||||
col_id,
|
||||
_get_column_type(self._df[make_safe_id(col_id)].dtype))
|
||||
for col_index, col_id in enumerate(_df.columns)]
|
||||
if self._state.row_index:
|
||||
columns.insert(0, DataGridColumnState(make_safe_id(ROW_INDEX_ID), -1, " ", ColumnType.RowIndex))
|
||||
|
||||
return columns
|
||||
|
||||
self._df = df.copy()
|
||||
self._df.columns = self._df.columns.map(make_safe_id) # make sure column names are trimmed
|
||||
self._state.rows = [DataGridRowState(row_id) for row_id in self._df.index]
|
||||
self._state.columns = [DataGridColumnState(make_safe_id(col_id),
|
||||
col_index,
|
||||
col_id,
|
||||
_get_column_type(self._df[make_safe_id(col_id)].dtype))
|
||||
for col_index, col_id in enumerate(df.columns)]
|
||||
self._state.columns = _init_columns(df) # use df not self._df to keep the original title
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._total_rows = len(self._df) if self._df is not None else 0
|
||||
|
||||
if save_state:
|
||||
self._db.save_all(None, self._state, self._df)
|
||||
@@ -205,6 +218,7 @@ class DataGrid(BaseComponent):
|
||||
|
||||
self._state.columns = new_columns_states
|
||||
|
||||
self._fast_access = self._init_fast_access(self._df)
|
||||
self._views.recompute_need_save()
|
||||
|
||||
self._db.save_all(self._settings, self._state, self._df if new_column else None)
|
||||
@@ -439,7 +453,7 @@ class DataGrid(BaseComponent):
|
||||
_mk_keyboard_management(),
|
||||
Div(
|
||||
self.mk_table_header(),
|
||||
self.mk_table_body(),
|
||||
self.mk_table_body_page(),
|
||||
self.mk_table_footer(),
|
||||
cls="dt2-inner-table"),
|
||||
cls="dt2-table",
|
||||
@@ -479,20 +493,18 @@ class DataGrid(BaseComponent):
|
||||
id=f"th_{self._id}"
|
||||
)
|
||||
|
||||
def mk_table_body(self):
|
||||
df = self._get_filtered_df()
|
||||
def mk_table_body_page(self):
|
||||
"""
|
||||
This function is used to update the table body when the vertical scrollbar reaches the end
|
||||
A new page is added when requested
|
||||
"""
|
||||
max_height = self._compute_body_max_height()
|
||||
|
||||
return Div(
|
||||
*[Div(
|
||||
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
|
||||
cls="dt2-row",
|
||||
data_row=f"{row_index}",
|
||||
id=f"tr_{self._id}-{row_index}",
|
||||
) for row_index in df.index],
|
||||
*self.mk_body_content_page(0),
|
||||
cls="dt2-body",
|
||||
style=f"max-height:{max_height}px;",
|
||||
id=f"tb_{self._id}"
|
||||
id=f"tb_{self._id}",
|
||||
)
|
||||
|
||||
def mk_table_footer(self):
|
||||
@@ -507,34 +519,55 @@ class DataGrid(BaseComponent):
|
||||
id=f"tf_{self._id}"
|
||||
)
|
||||
|
||||
def mk_body_content_page(self, page_index: int):
|
||||
df = self._get_filtered_df()
|
||||
start = page_index * DATAGRID_PAGE_SIZE
|
||||
end = start + DATAGRID_PAGE_SIZE
|
||||
if self._total_rows > end:
|
||||
last_row = df.index[end - 1]
|
||||
else:
|
||||
last_row = None
|
||||
|
||||
rows = [Div(
|
||||
*[self.mk_body_cell(col_pos, row_index, col_def) for col_pos, col_def in enumerate(self._state.columns)],
|
||||
cls="dt2-row",
|
||||
data_row=f"{row_index}",
|
||||
id=f"tr_{self._id}-{row_index}",
|
||||
**self.commands.get_page(page_index + 1) if row_index == last_row else {}
|
||||
) for row_index in df.index[start:end]]
|
||||
|
||||
rows.append(Script(f"manageScrollbars('{self._id}', false);"), )
|
||||
|
||||
return rows
|
||||
|
||||
def mk_body_cell(self, col_pos, row_index, col_def: DataGridColumnState):
|
||||
if not col_def.usable:
|
||||
return None
|
||||
|
||||
if not col_def.visible:
|
||||
return Div(cls="dt2-col-hidden")
|
||||
return MyDiv(cls="dt2-col-hidden")
|
||||
|
||||
content = self.mk_body_cell_content(col_pos, row_index, col_def)
|
||||
|
||||
return Div(content,
|
||||
data_col=col_def.col_id,
|
||||
style=f"width:{col_def.width}px;",
|
||||
cls="dt2-cell")
|
||||
return MyDiv(content,
|
||||
data_col=col_def.col_id,
|
||||
style=f"width:{col_def.width}px;",
|
||||
cls="dt2-cell")
|
||||
|
||||
def mk_body_cell_content(self, col_pos, row_index, col_def: DataGridColumnState):
|
||||
|
||||
def mk_bool(value):
|
||||
return Div(mk_icon(icon_checked if value else icon_unchecked, can_select=False),
|
||||
cls="dt2-cell-content-checkbox")
|
||||
def mk_bool(_value):
|
||||
return MyDiv(mk_my_icon(icon_checked if _value else icon_unchecked, can_select=False),
|
||||
cls="dt2-cell-content-checkbox")
|
||||
|
||||
def mk_text(value):
|
||||
return mk_ellipsis(value, cls="dt2-cell-content-text")
|
||||
def mk_text(_value):
|
||||
return mk_my_ellipsis(_value, cls="dt2-cell-content-text")
|
||||
|
||||
def mk_number(value):
|
||||
return mk_ellipsis(value, cls="dt2-cell-content-number")
|
||||
def mk_number(_value):
|
||||
return mk_my_ellipsis(_value, cls="dt2-cell-content-number")
|
||||
|
||||
def process_cell_content(value):
|
||||
value_str = str(value)
|
||||
def process_cell_content(_value):
|
||||
value_str = html.escape(str(_value))
|
||||
|
||||
if FILTER_INPUT_CID not in self._state.filtered or (
|
||||
keyword := self._state.filtered[FILTER_INPUT_CID]) is None:
|
||||
@@ -545,21 +578,22 @@ class DataGrid(BaseComponent):
|
||||
return value_str
|
||||
|
||||
len_keyword = len(keyword)
|
||||
res = [Span(value_str[:index])] if index > 0 else []
|
||||
res += [Span(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
|
||||
res += [Span(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
|
||||
res = [MySpan(value_str[:index])] if index > 0 else []
|
||||
res += [MySpan(value_str[index:index + len_keyword], cls="dt2-highlight-1")]
|
||||
res += [MySpan(value_str[index + len_keyword:])] if len(value_str) > len_keyword else []
|
||||
return tuple(res)
|
||||
|
||||
column_type = col_def.type
|
||||
value = self._fast_access[col_def.col_id][row_index]
|
||||
|
||||
if column_type == ColumnType.Bool:
|
||||
content = mk_bool(self._df.iloc[row_index, col_def.col_index])
|
||||
content = mk_bool(value)
|
||||
elif column_type == ColumnType.Number:
|
||||
content = mk_number(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
|
||||
content = mk_number(process_cell_content(value))
|
||||
elif column_type == ColumnType.RowIndex:
|
||||
content = mk_number(row_index)
|
||||
else:
|
||||
content = mk_text(process_cell_content(self._df.iloc[row_index, col_def.col_index]))
|
||||
content = mk_text(process_cell_content(value))
|
||||
|
||||
return content
|
||||
|
||||
@@ -822,6 +856,31 @@ class DataGrid(BaseComponent):
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _init_fast_access(df):
|
||||
"""
|
||||
Generates a fast-access dictionary for a DataFrame.
|
||||
|
||||
This method converts the columns of the provided DataFrame into NumPy arrays
|
||||
and stores them as values in a dictionary, using the column names as keys.
|
||||
This allows for efficient access to the data stored in the DataFrame.
|
||||
|
||||
Args:
|
||||
df (DataFrame): The input pandas DataFrame whose columns are to be converted
|
||||
into a dictionary of NumPy arrays.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary where the keys are the column names of the input DataFrame
|
||||
and the values are the corresponding column values as NumPy arrays.
|
||||
"""
|
||||
if df is None:
|
||||
return {}
|
||||
|
||||
res = {col: df[col].to_numpy() for col in df.columns}
|
||||
res[ROW_INDEX_ID] = df.index.to_numpy()
|
||||
return res
|
||||
|
||||
@timed
|
||||
def __ft__(self):
|
||||
return Div(
|
||||
Div(
|
||||
@@ -844,7 +903,7 @@ class DataGrid(BaseComponent):
|
||||
@staticmethod
|
||||
def new(session, data, index=None):
|
||||
datagrid = DataGrid(session, DataGrid.create_component_id(session))
|
||||
#dataframe = DataFrame(data, index=index)
|
||||
# dataframe = DataFrame(data, index=index)
|
||||
dataframe = DataFrame(data)
|
||||
datagrid.init_from_dataframe(dataframe)
|
||||
return datagrid
|
||||
|
||||
@@ -91,12 +91,21 @@ class DataGridCommandManager(BaseCommandManager):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.OnClick}",
|
||||
"hx-target": f"#tsm_{self._id}",
|
||||
"hx-trigger" : "click",
|
||||
"hx-trigger": "click",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{_id: "{self._id}", cell_id:getCellId(event), modifier:getClickModifier(event), boundaries: getCellBoundaries(event)}}',
|
||||
"hx-on::before-request": f'validateOnClickRequest("{self._id}", event)',
|
||||
}
|
||||
|
||||
def get_page(self, page_index=0):
|
||||
return {
|
||||
"hx-get": f"{ROUTE_ROOT}{Routes.GetPage}",
|
||||
"hx-target": f"#tb_{self._id}",
|
||||
"hx-swap": "beforeend",
|
||||
"hx-vals": f'{{"_id": "{self._id}", "page_index": "{page_index}"}}',
|
||||
"hx-trigger": f"intersect root:#tb_{self._id} once",
|
||||
}
|
||||
|
||||
def _get_hide_show_columns_attrs(self, mode, col_defs: list, new_value, cls=""):
|
||||
str_col_names = ", ".join(f"'{col_def.title}'" for col_def in col_defs)
|
||||
tooltip_msg = f"{mode} column{'s' if len(col_defs) > 1 else ''} {str_col_names}"
|
||||
@@ -109,38 +118,6 @@ class DataGridCommandManager(BaseCommandManager):
|
||||
"data_tooltip": tooltip_msg,
|
||||
"cls": self.merge_class(cls, "mmt-tooltip")
|
||||
}
|
||||
#
|
||||
# @staticmethod
|
||||
# def merge(*items):
|
||||
# """
|
||||
# Merges multiple dictionaries into a single dictionary by combining their key-value pairs.
|
||||
# If a key exists in multiple dictionaries and its value is a string, the values are concatenated.
|
||||
# If the key's value is not a string, an error is raised.
|
||||
#
|
||||
# :param items: dictionaries to be merged. If all items are None, None is returned.
|
||||
# :return: A single dictionary containing the merged key-value pairs from all input dictionaries.
|
||||
# :raises NotImplementedError: If a key's value is not a string and exists in multiple input dictionaries.
|
||||
# """
|
||||
# if all(item is None for item in items):
|
||||
# return None
|
||||
#
|
||||
# res = {}
|
||||
# for item in [item for item in items if item is not None]:
|
||||
#
|
||||
# for key, value in item.items():
|
||||
# if not key in res:
|
||||
# res[key] = value
|
||||
# else:
|
||||
# if isinstance(res[key], str):
|
||||
# res[key] += " " + value
|
||||
# else:
|
||||
# raise NotImplementedError("")
|
||||
#
|
||||
# return res
|
||||
#
|
||||
# @staticmethod
|
||||
# def merge_class(cls1, cls2):
|
||||
# return (cls1 + " " + cls2) if cls2 else cls1
|
||||
|
||||
|
||||
class FilterAllCommands(BaseCommandManager):
|
||||
@@ -165,4 +142,4 @@ class FilterAllCommands(BaseCommandManager):
|
||||
"hx_vals": f'{{"_id": "{self._id}", "col_id":"{FILTER_INPUT_CID}"}}',
|
||||
"data_tooltip": "Reset filter",
|
||||
"cls": self.merge_class(cls, "mmt-tooltip"),
|
||||
}
|
||||
}
|
||||
|
||||

@@ -17,6 +17,9 @@ CONTAINER_HEIGHT = "container_height"

DATAGRID_STATE_FOOTER = "footer"

DATAGRID_PAGE_SIZE = 50

ROW_INDEX_ID = "__row_index__"

class Routes:
Filter = "/filter" # request the filtering in the grid
@@ -33,6 +36,7 @@ class Routes:
UpdateView = "/update_view"
ShowFooterMenu = "/show_footer_menu"
UpdateState = "/update_state"
GetPage = "/page"


class ColumnType(Enum):
@@ -44,11 +48,13 @@ class ColumnType(Enum):
Choice = "Choice"
List = "List"


class ViewType(Enum):
Table = "Table"
Chart = "Chart"
Form = "Form"


class FooterAggregation(Enum):
Sum = "Sum"
Mean = "Mean"
@@ -59,4 +65,4 @@ class FooterAggregation(Enum):
FilteredMean = "FilteredMean"
FilteredMin = "FilteredMin"
FilteredMax = "FilteredMax"
FilteredCount = "FilteredCount"
FilteredCount = "FilteredCount"

@@ -69,6 +69,7 @@ class DataGridSettings:
class DataGridState:
sidebar_visible: bool = False
selected_view: str = None
row_index: bool = False
columns: list[DataGridColumnState] = dataclasses.field(default_factory=list)
rows: list[DataGridRowState] = dataclasses.field(default_factory=list) # only the rows that have a specific state
footers: list[DataGridFooterConf] = dataclasses.field(default_factory=list)

@@ -62,7 +62,7 @@ class JsonViewerHelper:
class JsonViewer(BaseComponent):
def __init__(self, session, _id, owner, user_id, data, hooks=None, key=None, boundaries=None):
super().__init__(session, _id)
self._key = key
self._key = key # for comparison between two jsonviewer components
self._owner = owner # debugger component
self.user_id = user_id
self.data = data
@@ -88,6 +88,10 @@ class JsonViewer(BaseComponent):

self._helper = JsonViewerHelper()

def set_data(self, data):
self.data = data
self.node = self._create_node(None, data)

def set_node_folding(self, node_id, folding):
if folding == self._folding_mode:
self._nodes_to_track.remove(node_id)
@@ -311,8 +315,6 @@ class JsonViewer(BaseComponent):
def __hash__(self):
return hash(self._key) if self._key is not None else super().__hash__()


@staticmethod
def add_quotes(value: str):
if '"' in value and "'" in value:

src/components/entryselector/EntrySelectorApp.py (new file): 26 lines
@@ -0,0 +1,26 @@
import logging

from fasthtml.fastapp import fast_app

from components.entryselector.constants import Routes
from core.instance_manager import debug_session, InstanceManager

logger = logging.getLogger("EntrySelectorApp")

repositories_app, rt = fast_app()


@rt(Routes.Select)
def get(session, _id: str, entry: str):
logger.debug(f"Entering {Routes.Select} with args {debug_session(session)}, {_id=}, {entry=}")
instance = InstanceManager.get(session, _id)
to_update = instance.select_entry(entry)

res = [instance]
if res is None:
return instance
if isinstance(to_update, (list, tuple)):
res.extend(to_update)
else:
res.append(to_update)
return tuple(res)

src/components/entryselector/__init__.py (new file): empty

src/components/entryselector/assets/EntrySelector.css (new file): 20 lines
@@ -0,0 +1,20 @@
.es-container {
overflow-x: auto;
white-space: nowrap;

}

.es-entry {
border: 2px solid var(--color-base-300);
padding: 2px;
cursor: pointer;
display: inline-block; /* Ensure entries align horizontally if needed */
}

.es-entry-selected {
border: 2px solid var(--color-primary);
}

.es-entry:hover {
background-color: var(--color-base-300);
}

src/components/entryselector/assets/__init__.py (new file): empty

src/components/entryselector/commands.py (new file): 15 lines
@@ -0,0 +1,15 @@
from components.BaseCommandManager import BaseCommandManager
from components.entryselector.constants import Routes, ROUTE_ROOT


class EntrySelectorCommandManager(BaseCommandManager):
def __init__(self, owner):
super().__init__(owner)

def select_entry(self, entry):
return {
"hx-get": f"{ROUTE_ROOT}{Routes.Select}",
"hx-target": f"#{self._id}",
"hx-swap": "outerHTML",
"hx-vals": f'{{"_id": "{self._id}", "entry": "{entry}"}}',
}

src/components/entryselector/components/EntrySelector.py (new file): 56 lines
@@ -0,0 +1,56 @@
import logging

from fasthtml.components import *

from components.BaseComponent import BaseComponentMultipleInstance
from components.entryselector.commands import EntrySelectorCommandManager

logger = logging.getLogger("EntrySelector")


class EntrySelector(BaseComponentMultipleInstance):
def __init__(self, session, _id, owner, data=None, hooks=None, key=None, boundaries=None):
super().__init__(session, _id)
self._key = key
self._owner = owner # debugger component
self.data = data
self.selected = None
self.hooks = hooks
self._boundaries = boundaries if boundaries else {"width": "300"}
self._commands = EntrySelectorCommandManager(self)

def set_data(self, data):
self.data = data

def set_selected(self, selected):
if selected is None:
self.selected = None
else:
self.selected = int(selected)

def set_boundaries(self, boundaries):
self._boundaries = boundaries

def select_entry(self, entry):
logger.debug(f"Selecting entry {entry}")
self.set_selected(entry)
if self.hooks is not None and (on_entry_selected := self.hooks.get("on_entry_selected", None)) is not None:
return on_entry_selected(entry)
else:
return None

def _mk_content(self):
if not self.data:
return [Div("no entry")]

return [Div(index,
**self._commands.select_entry(index),
cls=f"es-entry {'es-entry-selected' if index == self.selected else ''}")
for index in range(self.data)]

def __ft__(self):
return Div(
*self._mk_content(),
cls="flex es-container",
id=f"{self._id}",
)

src/components/entryselector/components/__init__.py (new file): empty

src/components/entryselector/constants.py (new file): 5 lines
@@ -0,0 +1,5 @@
ROUTE_ROOT = "/es" # for EntrySelector


class Routes:
Select = "/select"

src/components/jsonviewer/JsonViewerApp.py (new file): 18 lines
@@ -0,0 +1,18 @@
import logging

from fasthtml.fastapp import fast_app

from components.jsonviewer.constants import Routes
from core.instance_manager import debug_session, InstanceManager

jsonviwer_app, rt = fast_app()

logger = logging.getLogger("JsonViewer")


@rt(Routes.Fold)
def post(session, _id: str, node_id: str, folding: str):
logger.debug(f"Entering {Routes.Fold} with args {debug_session(session)}, {_id=}, {node_id=}, {folding=}")
instance = InstanceManager.get(session, _id)
instance.set_node_folding(node_id, folding)
return instance.render_node(node_id)

src/components/jsonviewer/Readme.md (new file): 449 lines
@@ -0,0 +1,449 @@
# JsonViewer Hooks System - Technical Documentation

## Overview

The JsonViewer Hooks System provides a flexible, event-driven mechanism to customize the behavior and rendering of JSON nodes. Using a fluent builder pattern, developers can define conditions and actions that trigger during specific events in the JsonViewer lifecycle.

## Core Concepts

### Hook Architecture

A **Hook** consists of three components:
- **Event Type**: When the hook should trigger (`on_render`, `on_click`, etc.)
- **Conditions**: What criteria must be met for the hook to execute
- **Executor**: The function that runs when conditions are met

### HookContext

The `HookContext` object provides rich information about the current node being processed:

```python
class HookContext:
    key: Any          # The key of the current node
    node: Any         # The node object itself
    helper: Any       # JsonViewer helper utilities
    jsonviewer: Any   # Reference to the parent JsonViewer instance
    json_path: str    # Full JSON path (e.g., "users.0.name")
    parent_node: Any  # Reference to the parent node
    metadata: dict    # Additional metadata storage
```

**Utility Methods** (a short usage sketch follows this list):
- `get_node_type()`: Returns the string representation of the node type
- `get_value()`: Gets the actual value from the node
- `is_leaf_node()`: Checks if the node has no children
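
A minimal sketch of these helpers inside an executor. The hook and renderer names, and the assumption that returning `None` from an `on_render` executor falls back to the default rendering, are illustrative rather than part of the documented API:

```python
def leaf_debug_renderer(context):
    # Only decorate leaf values; containers fall through by returning None
    # (assumption: None means "keep the default rendering").
    if not context.is_leaf_node():
        return None
    return Span(f"{context.get_value()!r} ({context.get_node_type()})",
                cls="debug-leaf")

leaf_debug_hook = (HookBuilder()
                   .on_render()
                   .when_custom(lambda ctx: ctx.is_leaf_node())
                   .execute(leaf_debug_renderer))
```
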
## HookBuilder API
|
||||
|
||||
### Creating a Hook
|
||||
|
||||
Use the `HookBuilder` class with method chaining to create hooks:
|
||||
|
||||
```python
|
||||
hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_long_text(100)
|
||||
.execute(my_custom_renderer))
|
||||
```
|
||||
|
||||
### Event Types
|
||||
|
||||
#### `on_render()`
|
||||
Triggers during node rendering, allowing custom rendering logic.
|
||||
|
||||
```python
|
||||
def custom_text_renderer(context):
|
||||
value = context.get_value()
|
||||
return Span(f"Custom: {value}", cls="custom-text")
|
||||
|
||||
text_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_type(str)
|
||||
.execute(custom_text_renderer))
|
||||
```
|
||||
|
||||
#### `on_click()`
|
||||
Triggers when a node is clicked.
|
||||
|
||||
```python
|
||||
def handle_click(context):
|
||||
print(f"Clicked on: {context.json_path}")
|
||||
return None # No rendering change
|
||||
|
||||
click_hook = (HookBuilder()
|
||||
.on_click()
|
||||
.when_editable()
|
||||
.requires_modification()
|
||||
.execute(handle_click))
|
||||
```
|
||||
|
||||
#### `on_hover()` / `on_focus()`
|
||||
Triggers on hover or focus events respectively.
|
||||
|
||||
```python
|
||||
def show_tooltip(context):
|
||||
return Div(f"Path: {context.json_path}", cls="tooltip")
|
||||
|
||||
hover_hook = (HookBuilder()
|
||||
.on_hover()
|
||||
.when_type(str)
|
||||
.execute(show_tooltip))
|
||||
```
|
||||
|
||||
## Conditions
|
||||
|
||||
Conditions determine when a hook should execute. Multiple conditions can be chained, and all must be satisfied.
|
||||
|
||||
### `when_type(target_type)`
|
||||
Matches nodes with values of a specific type.
|
||||
|
||||
```python
|
||||
# Hook for string values only
|
||||
string_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_type(str)
|
||||
.execute(string_formatter))
|
||||
|
||||
# Hook for numeric values
|
||||
number_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_type((int, float)) # Accepts tuple of types
|
||||
.execute(number_formatter))
|
||||
```
|
||||
|
||||
### `when_key(key_pattern)`
|
||||
Matches nodes based on their key.
|
||||
|
||||
```python
|
||||
# Exact key match
|
||||
email_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_key("email")
|
||||
.execute(email_formatter))
|
||||
|
||||
# Function-based key matching
|
||||
def is_id_key(key):
|
||||
return str(key).endswith("_id")
|
||||
|
||||
id_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_key(is_id_key)
|
||||
.execute(id_formatter))
|
||||
```
|
||||
|
||||
### `when_value(target_value=None, predicate=None)`
|
||||
Matches nodes based on their actual value.
|
||||
|
||||
**Exact value matching:**
|
||||
```python
|
||||
# Highlight error status
|
||||
error_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_value("ERROR")
|
||||
.execute(lambda ctx: Span(ctx.get_value(), cls="error-status")))
|
||||
|
||||
# Special handling for null values
|
||||
null_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_value(None)
|
||||
.execute(lambda ctx: Span("N/A", cls="null-value")))
|
||||
```
|
||||
|
||||
**Predicate-based matching:**
|
||||
```python
|
||||
# URLs as clickable links
|
||||
url_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_value(predicate=lambda x: isinstance(x, str) and x.startswith("http"))
|
||||
.execute(lambda ctx: A(ctx.get_value(), href=ctx.get_value(), target="_blank")))
|
||||
|
||||
# Large numbers formatting
|
||||
large_number_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_value(predicate=lambda x: isinstance(x, (int, float)) and x > 1000)
|
||||
    .execute(lambda ctx: Span(f"{ctx.get_value():,}", cls="large-number")))
|
||||
```
|
||||
|
||||
### `when_path(path_pattern)`
|
||||
Matches nodes based on their JSON path using regex.
|
||||
|
||||
```python
|
||||
# Match all user names
|
||||
user_name_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_path(r"users\.\d+\.name")
|
||||
.execute(user_name_formatter))
|
||||
|
||||
# Match any nested configuration
|
||||
config_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_path(r".*\.config\..*")
|
||||
.execute(config_formatter))
|
||||
```
|
||||
|
||||
### `when_long_text(threshold=100)`
|
||||
Matches string values longer than the specified threshold.
|
||||
|
||||
```python
|
||||
def text_truncator(context):
|
||||
value = context.get_value()
|
||||
truncated = value[:100] + "..."
|
||||
return Div(
|
||||
Span(truncated, cls="truncated-text"),
|
||||
Button("Show more", cls="expand-btn"),
|
||||
cls="long-text-container"
|
||||
)
|
||||
|
||||
long_text_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_long_text(100)
|
||||
.execute(text_truncator))
|
||||
```
|
||||
|
||||
### `when_editable(editable_paths=None, editable_types=None)`
|
||||
Matches nodes that should be editable.
|
||||
|
||||
```python
|
||||
def inline_editor(context):
|
||||
value = context.get_value()
|
||||
return Input(
|
||||
value=str(value),
|
||||
type="text" if isinstance(value, str) else "number",
|
||||
cls="inline-editor",
|
||||
**{"data-path": context.json_path}
|
||||
)
|
||||
|
||||
editable_hook = (HookBuilder()
|
||||
.on_click()
|
||||
.when_editable(
|
||||
editable_paths=["user.name", "user.email"],
|
||||
editable_types=[str, int, float]
|
||||
)
|
||||
.requires_modification()
|
||||
.execute(inline_editor))
|
||||
```
|
||||
|
||||
### `when_custom(condition)`
Use custom condition objects or callable predicates for complex logic.

The `when_custom()` method accepts either:
- **Condition instances**: Objects that inherit from the `Condition` base class
- **Callable predicates**: Functions that take a `HookContext` parameter and return a boolean

When a callable is provided, it's automatically wrapped in a `PredicateCondition` class internally.

```python
class BusinessLogicCondition(Condition):
    def evaluate(self, context):
        # Complex business logic here
        return (context.key == "status" and
                context.get_value() in ["pending", "processing"])

custom_hook = (HookBuilder()
               .on_render()
               .when_custom(BusinessLogicCondition())
               .execute(status_renderer))
```

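For lighter checks, the same condition can be passed as a plain callable and rely on the automatic `PredicateCondition` wrapping described above. A sketch (the hook name is a placeholder, and `status_renderer` is the executor from the previous example):

```python
# Equivalent condition expressed as a callable predicate instead of a
# Condition subclass; it is wrapped in a PredicateCondition internally.
pending_status_hook = (HookBuilder()
                       .on_render()
                       .when_custom(lambda ctx: ctx.key == "status"
                                    and ctx.get_value() in ["pending", "processing"])
                       .execute(status_renderer))
```
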
## Combining Conditions
|
||||
|
||||
### Multiple Conditions (AND Logic)
|
||||
Chain multiple conditions - all must be satisfied:
|
||||
|
||||
```python
|
||||
complex_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_type(str)
|
||||
.when_key("description")
|
||||
.when_long_text(50)
|
||||
.execute(description_formatter))
|
||||
```
|
||||
|
||||
### Composite Conditions
|
||||
Use `when_all()` and `when_any()` for explicit logic:
|
||||
|
||||
```python
|
||||
# AND logic
|
||||
strict_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_all([
|
||||
WhenType(str),
|
||||
WhenLongText(100),
|
||||
WhenKey("content")
|
||||
])
|
||||
.execute(content_formatter))
|
||||
|
||||
# OR logic
|
||||
flexible_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_any([
|
||||
WhenKey("title"),
|
||||
WhenKey("name"),
|
||||
WhenKey("label")
|
||||
])
|
||||
.execute(title_formatter))
|
||||
```
|
||||
|
||||
## State Modification
|
||||
|
||||
Use `requires_modification()` to indicate that the hook will modify the application state:
|
||||
|
||||
```python
|
||||
def save_edit(context):
|
||||
new_value = get_new_value_from_ui() # Implementation specific
|
||||
# Update the actual data
|
||||
context.jsonviewer.update_value(context.json_path, new_value)
|
||||
return success_indicator()
|
||||
|
||||
edit_hook = (HookBuilder()
|
||||
.on_click()
|
||||
.when_editable()
|
||||
.requires_modification()
|
||||
.execute(save_edit))
|
||||
```
|
||||
|
||||
## Complete Examples
|
||||
|
||||
### Example 1: Enhanced Text Display
|
||||
|
||||
```python
|
||||
def enhanced_text_renderer(context):
|
||||
value = context.get_value()
|
||||
|
||||
# Truncate long text
|
||||
if len(value) > 100:
|
||||
display_value = value[:100] + "..."
|
||||
tooltip = value # Full text as tooltip
|
||||
else:
|
||||
display_value = value
|
||||
tooltip = None
|
||||
|
||||
return Span(
|
||||
display_value,
|
||||
cls="enhanced-text",
|
||||
title=tooltip,
|
||||
**{"data-full-text": value}
|
||||
)
|
||||
|
||||
text_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_type(str)
|
||||
.when_value(predicate=lambda x: len(x) > 20)
|
||||
.execute(enhanced_text_renderer))
|
||||
```
|
||||
|
||||
### Example 2: Interactive Email Fields
|
||||
|
||||
```python
|
||||
def email_renderer(context):
|
||||
email = context.get_value()
|
||||
return Div(
|
||||
A(f"mailto:{email}", href=f"mailto:{email}", cls="email-link"),
|
||||
Button("Copy", cls="copy-btn", **{"data-clipboard": email}),
|
||||
cls="email-container"
|
||||
)
|
||||
|
||||
email_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_key("email")
|
||||
.when_value(predicate=lambda x: "@" in str(x))
|
||||
.execute(email_renderer))
|
||||
```
|
||||
|
||||
### Example 3: Status Badge System
|
||||
|
||||
```python
|
||||
def status_badge(context):
|
||||
status = context.get_value().lower()
|
||||
|
||||
badge_classes = {
|
||||
"active": "badge-success",
|
||||
"pending": "badge-warning",
|
||||
"error": "badge-danger",
|
||||
"inactive": "badge-secondary"
|
||||
}
|
||||
|
||||
css_class = badge_classes.get(status, "badge-default")
|
||||
|
||||
return Span(
|
||||
status.title(),
|
||||
cls=f"status-badge {css_class}"
|
||||
)
|
||||
|
||||
status_hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_key("status")
|
||||
.when_value(predicate=lambda x: str(x).lower() in ["active", "pending", "error", "inactive"])
|
||||
.execute(status_badge))
|
||||
```
|
||||
|
||||
## Integration with JsonViewer
|
||||
|
||||
### Adding Hooks to JsonViewer
|
||||
|
||||
```python
|
||||
# Create your hooks
|
||||
hooks = [
|
||||
text_hook,
|
||||
email_hook,
|
||||
status_hook
|
||||
]
|
||||
|
||||
# Initialize JsonViewer with hooks
|
||||
viewer = JsonViewer(
|
||||
session=session,
|
||||
_id="my-viewer",
|
||||
data=my_json_data,
|
||||
hooks=hooks
|
||||
)
|
||||
```
|
||||
|
||||
### Factory Functions
|
||||
|
||||
Create reusable hook factories for common patterns:
|
||||
|
||||
```python
|
||||
def create_url_link_hook():
|
||||
"""Factory for URL link rendering"""
|
||||
def url_renderer(context):
|
||||
url = context.get_value()
|
||||
return A(url, href=url, target="_blank", cls="url-link")
|
||||
|
||||
return (HookBuilder()
|
||||
.on_render()
|
||||
.when_value(predicate=lambda x: isinstance(x, str) and x.startswith(("http://", "https://")))
|
||||
.execute(url_renderer))
|
||||
|
||||
def create_currency_formatter_hook(currency_symbol="$"):
|
||||
"""Factory for currency formatting"""
|
||||
def currency_renderer(context):
|
||||
amount = context.get_value()
|
||||
return Span(f"{currency_symbol}{amount:,.2f}", cls="currency-amount")
|
||||
|
||||
return (HookBuilder()
|
||||
.on_render()
|
||||
.when_type((int, float))
|
||||
.when_key(lambda k: "price" in str(k).lower() or "amount" in str(k).lower())
|
||||
.execute(currency_renderer))
|
||||
|
||||
# Usage
|
||||
hooks = [
|
||||
create_url_link_hook(),
|
||||
create_currency_formatter_hook("€"),
|
||||
]
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Specific Conditions**: Use the most specific conditions possible to avoid unintended matches
|
||||
2. **Performance**: Avoid complex predicates in `when_value()` for large datasets
|
||||
3. **Error Handling**: Include error handling in your executor functions
|
||||
4. **Reusability**: Create factory functions for common hook patterns
|
||||
5. **Testing**: Test hooks with various data structures to ensure they work as expected
|
||||
|
||||
## Performance Considerations

- Hooks are evaluated in the order they are added to the JsonViewer (see the sketch after this list)
- Only the first matching hook for each event type will execute per node
- Use simple conditions when possible to minimize evaluation time
- Consider the size of your JSON data when using regex in `when_path()`
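
A short sketch of the first two points. The renderer names are placeholders, and the constructor call mirrors the simplified form used in the integration example above:

```python
# Both hooks match string nodes. Hooks run in registration order and only
# the first match executes per node, so the broader hook would shadow the
# narrower one; register the narrower hook first.
narrow_hook = (HookBuilder()
               .on_render()
               .when_key("description")
               .when_type(str)
               .execute(description_renderer))

broad_hook = (HookBuilder()
              .on_render()
              .when_type(str)
              .execute(generic_text_renderer))

viewer = JsonViewer(session=session, _id="hook-order-demo",
                    data=data, hooks=[narrow_hook, broad_hook])
```
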
src/components/jsonviewer/__init__.py (new file): empty
src/components/jsonviewer/assets/__init__.py (new file): empty
27
src/components/jsonviewer/assets/icons.py
Normal file
27
src/components/jsonviewer/assets/icons.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from fastcore.basics import NotStr
|
||||
|
||||
# Fluent CaretRight20Filled
|
||||
icon_collapsed = NotStr("""<svg name="collapsed" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
|
||||
<g fill="none">
|
||||
<path d="M7 14.204a1 1 0 0 0 1.628.778l4.723-3.815a1.5 1.5 0 0 0 0-2.334L8.628 5.02A1 1 0 0 0 7 5.797v8.407z" fill="currentColor">
|
||||
</path>
|
||||
</g>
|
||||
</svg>""")
|
||||
|
||||
# Fluent CaretDown20Filled
|
||||
icon_expanded = NotStr("""<svg name="expanded" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20">
|
||||
<g fill="none">
|
||||
<path d="M5.797 7a1 1 0 0 0-.778 1.628l3.814 4.723a1.5 1.5 0 0 0 2.334 0l3.815-4.723A1 1 0 0 0 14.204 7H5.797z" fill="currentColor">
|
||||
</path>
|
||||
</g>
|
||||
</svg>""")
|
||||
|
||||
icon_class = NotStr("""
|
||||
<svg name="expanded" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
|
||||
<g fill="none" stroke="currentColor" stroke-width="1.5" >
|
||||
<polygon points="5,2 2,8 8,8" />
|
||||
<rect x="12" y="2" width="6" height="6"/>
|
||||
<circle cx="5" cy="15" r="3" />
|
||||
<polygon points="11.5,15 15,11.5 18.5,15 15,18.5" />
|
||||
</g>
|
||||
</svg>""")
|
||||
23
src/components/jsonviewer/commands.py
Normal file
23
src/components/jsonviewer/commands.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from components.jsonviewer.constants import ROUTE_ROOT, Routes
|
||||
|
||||
|
||||
class JsonViewerCommands:
|
||||
def __init__(self, owner):
|
||||
self._owner = owner
|
||||
self._id = owner.get_id()
|
||||
|
||||
def fold(self, node_id: str, folding: str):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.Fold}",
|
||||
"hx-target": f"#{node_id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'{{"_id": "{self._id}", "node_id": "{node_id}", "folding": "{folding}"}}',
|
||||
}
|
||||
|
||||
def open_digest(self, user_id, digest):
|
||||
return {
|
||||
"hx-post": f"{ROUTE_ROOT}{Routes.DbEngineDigest}",
|
||||
"hx-target": f"#{self._owner.get_owner().tabs_manager.get_id()}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'{{"_id": "{self._id}", "user_id": "{user_id}", "digest": "{digest}"}}',
|
||||
}
|
||||
544
src/components/jsonviewer/components/JsonViewer.py
Normal file
544
src/components/jsonviewer/components/JsonViewer.py
Normal file
@@ -0,0 +1,544 @@
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Optional
|
||||
|
||||
from fasthtml.components import *
|
||||
from pandas import DataFrame
|
||||
|
||||
from components.BaseComponent import BaseComponentMultipleInstance
|
||||
from components.datagrid_new.components.DataGrid import DataGrid
|
||||
from components.jsonviewer.assets.icons import icon_expanded, icon_collapsed, icon_class
|
||||
from components.jsonviewer.commands import JsonViewerCommands
|
||||
from components.jsonviewer.constants import NODES_KEYS_TO_NOT_EXPAND, NODE_OBJECT, INDENT_SIZE, MAX_TEXT_LENGTH
|
||||
from components.jsonviewer.hooks import HookManager, HookContext, EventType, Hook
|
||||
from components_helpers import apply_boundaries
|
||||
from core.serializer import TAG_OBJECT
|
||||
|
||||
|
||||
class FoldingMode:
|
||||
COLLAPSE = "collapse"
|
||||
EXPAND = "expand"
|
||||
|
||||
|
||||
@dataclass
|
||||
class Node:
|
||||
value: Any
|
||||
|
||||
|
||||
@dataclass
|
||||
class ValueNode(Node):
|
||||
hint: str = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ListNode(Node):
|
||||
node_id: str
|
||||
level: int
|
||||
children: list[Node] = field(default_factory=list)
|
||||
|
||||
|
||||
@dataclass
|
||||
class DictNode(Node):
|
||||
node_id: str
|
||||
level: int
|
||||
children: dict[str, Node] = field(default_factory=dict)
|
||||
|
||||
|
||||
class NodeIdGenerator:
|
||||
"""Manages unique node ID generation"""
|
||||
|
||||
def __init__(self, base_id: str):
|
||||
self.base_id = base_id
|
||||
self._counter = -1
|
||||
|
||||
def generate(self) -> str:
|
||||
self._counter += 1
|
||||
return f"{self.base_id}-{self._counter}"
|
||||
|
||||
def reset(self):
|
||||
self._counter = -1
|
||||
|
||||
|
||||
class FoldingManager:
|
||||
"""Manages folding/unfolding state of nodes"""
|
||||
|
||||
# A little explanation on how the folding / unfolding work
|
||||
# all the nodes are either fold or unfold... except when there are not !
|
||||
# self._folding_mode keeps the current value (it's FoldingMode.COLLAPSE or FoldingMode.EXPAND
|
||||
# self._nodes_to_track keeps track of the exceptions
|
||||
# The idea is to minimize the memory usage
|
||||
|
||||
def __init__(self, default_mode: str = FoldingMode.COLLAPSE):
|
||||
self._folding_mode = default_mode
|
||||
self._nodes_to_track = set() # exceptions to the default mode
|
||||
|
||||
def set_folding_mode(self, mode: str):
|
||||
"""Changes the global folding mode and clears exceptions"""
|
||||
self._folding_mode = mode
|
||||
self._nodes_to_track.clear()
|
||||
|
||||
def set_node_folding(self, node_id: str, folding: str):
|
||||
"""Sets specific folding state for a node"""
|
||||
if folding == self._folding_mode:
|
||||
self._nodes_to_track.discard(node_id)
|
||||
else:
|
||||
self._nodes_to_track.add(node_id)
|
||||
|
||||
def must_expand(self, node: Node) -> Optional[bool]:
|
||||
"""Determines if a node should be expanded"""
|
||||
if not isinstance(node, (ListNode, DictNode)):
|
||||
return None
|
||||
|
||||
if self._folding_mode == FoldingMode.COLLAPSE:
|
||||
return node.node_id in self._nodes_to_track
|
||||
else:
|
||||
return node.node_id not in self._nodes_to_track
|
||||
|
||||
def get_folding_mode(self) -> str:
|
||||
return self._folding_mode
|
||||
|
||||
def get_nodes_to_track(self) -> set[str]:
|
||||
return self._nodes_to_track
|
||||
|
||||
|
||||
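# Illustrative sketch (hypothetical usage, building on the classes above): only one global
# mode plus the set of exception node ids is stored, so memory grows with the number of
# exceptions rather than with the number of nodes.
def _example_folding_manager():
    fm = FoldingManager()                              # default mode is COLLAPSE
    node = DictNode({}, node_id="jv-1-0", level=0)
    assert fm.must_expand(node) is False               # follows the global mode
    fm.set_node_folding("jv-1-0", FoldingMode.EXPAND)  # becomes an exception
    assert fm.must_expand(node) is True
    fm.set_folding_mode(FoldingMode.EXPAND)            # new mode, exceptions cleared
    assert fm.must_expand(node) is True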
class NodeFactory:
|
||||
"""Factory for creating nodes from data with JSON path tracking"""
|
||||
|
||||
def __init__(self, id_generator: NodeIdGenerator, folding_manager: FoldingManager):
|
||||
self.id_generator = id_generator
|
||||
self.folding_manager = folding_manager
|
||||
self._nodes_by_id = {}
|
||||
self._node_paths = {} # node_id -> json_path mapping
|
||||
self._node_parents = {} # node_id -> parent_node mapping
|
||||
|
||||
def create_node(self, key: Any, data: Any, level: int = 0, json_path: str = "", parent_node: Any = None) -> Node:
|
||||
"""Creates appropriate node type based on data with path tracking"""
|
||||
if isinstance(data, list):
|
||||
return self._create_list_node(key, data, level, json_path, parent_node)
|
||||
elif isinstance(data, dict):
|
||||
return self._create_dict_node(key, data, level, json_path, parent_node)
|
||||
else:
|
||||
return self._create_value_node(key, data, json_path, parent_node)
|
||||
|
||||
def _create_list_node(self, key: Any, data: list, level: int, json_path: str, parent_node: Any) -> ListNode:
|
||||
node_id = self.id_generator.generate()
|
||||
if level <= 1 and key not in NODES_KEYS_TO_NOT_EXPAND:
|
||||
self.folding_manager._nodes_to_track.add(node_id)
|
||||
|
||||
node = ListNode(data, node_id, level)
|
||||
self._nodes_by_id[node_id] = (key, node)
|
||||
self._node_paths[node_id] = json_path
|
||||
self._node_parents[node_id] = parent_node
|
||||
|
||||
for index, item in enumerate(data):
|
||||
child_path = f"{json_path}[{index}]" if json_path else f"[{index}]"
|
||||
node.children.append(self.create_node(index, item, level + 1, child_path, node))
|
||||
|
||||
return node
|
||||
|
||||
def _create_dict_node(self, key: Any, data: dict, level: int, json_path: str, parent_node: Any) -> DictNode:
|
||||
node_id = self.id_generator.generate()
|
||||
if level <= 1 and key not in NODES_KEYS_TO_NOT_EXPAND:
|
||||
self.folding_manager._nodes_to_track.add(node_id)
|
||||
|
||||
node = DictNode(data, node_id, level)
|
||||
self._nodes_by_id[node_id] = (key, node)
|
||||
self._node_paths[node_id] = json_path
|
||||
self._node_parents[node_id] = parent_node
|
||||
|
||||
for child_key, value in data.items():
|
||||
child_path = f"{json_path}.{child_key}" if json_path else str(child_key)
|
||||
node.children[child_key] = self.create_node(child_key, value, level + 1, child_path, node)
|
||||
|
||||
return node
|
||||
|
||||
def _create_value_node(self, key: Any, data: Any, json_path: str, parent_node: Any) -> ValueNode:
|
||||
hint = NODE_OBJECT if key == TAG_OBJECT else None
|
||||
node = ValueNode(data, hint)
|
||||
# Value nodes don't have node_id, but we can still track their path for hooks
|
||||
return node
|
||||
|
||||
def get_node_by_id(self, node_id: str) -> tuple[Any, Node]:
|
||||
return self._nodes_by_id[node_id]
|
||||
|
||||
def get_node_path(self, node_id: str) -> str:
|
||||
return self._node_paths.get(node_id, "")
|
||||
|
||||
def get_node_parent(self, node_id: str) -> Any:
|
||||
return self._node_parents.get(node_id, None)
|
||||
|
||||
def clear(self):
|
||||
"""Clears all stored nodes"""
|
||||
self._nodes_by_id.clear()
|
||||
self._node_paths.clear()
|
||||
self._node_parents.clear()
|
||||
|
||||
|
||||
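# Illustrative sketch (hypothetical data): the factory assigns each container node an id,
# a JSON path and a parent reference, so any node can later be re-rendered from its id alone.
def _example_node_factory():
    factory = NodeFactory(NodeIdGenerator("jv-1"), FoldingManager())
    root = factory.create_node(None, {"user": {"name": "Ada"}, "tags": ["a", "b"]})
    user_node = root.children["user"]
    assert factory.get_node_path(user_node.node_id) == "user"
    assert factory.get_node_parent(user_node.node_id) is root
    tags_node = root.children["tags"]
    assert factory.get_node_path(tags_node.node_id) == "tags"
    assert isinstance(tags_node.children[0], ValueNode)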
class JsonViewerHelper:
|
||||
class_string = "mmt-jsonviewer-string"
class_bool = "mmt-jsonviewer-bool"
class_number = "mmt-jsonviewer-number"
class_null = "mmt-jsonviewer-null"
class_digest = "mmt-jsonviewer-digest"
class_object = "mmt-jsonviewer-object"
class_dataframe = "mmt-jsonviewer-dataframe"
|
||||
|
||||
@staticmethod
|
||||
def is_sha256(_value):
|
||||
return (isinstance(_value, str) and
|
||||
len(_value) == 64 and
|
||||
all(c in '0123456789abcdefABCDEF' for c in _value))
|
||||
|
||||
@staticmethod
|
||||
def add_quotes(value: str) -> str:
|
||||
if '"' in value and "'" in value:
|
||||
return f'"{value.replace("\"", "\\\"")}"'
|
||||
elif '"' in value:
|
||||
return f"'{value}'"
|
||||
else:
|
||||
return f'"{value}"'
|
||||
|
||||
|
||||
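# Illustrative sketch (hypothetical values): the helper only decides how raw values are
# classified and quoted, so it can be exercised without a running viewer.
def _example_helper():
    helper = JsonViewerHelper()
    assert helper.is_sha256("a" * 64) is True
    assert helper.is_sha256("not-a-digest") is False
    assert helper.add_quotes("plain") == '"plain"'
    assert helper.add_quotes('say "hi"') == "'say \"hi\"'"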
class NodeRenderer:
|
||||
"""Single class handling all node rendering with new hook system"""
|
||||
|
||||
def __init__(self, session,
|
||||
jsonviewer_instance,
|
||||
folding_manager: FoldingManager,
|
||||
commands: JsonViewerCommands,
|
||||
helper: JsonViewerHelper,
|
||||
hook_manager: HookManager,
|
||||
node_factory: NodeFactory):
|
||||
self.session = session
|
||||
self.jsonviewer = jsonviewer_instance
|
||||
self.folding_manager = folding_manager
|
||||
self.commands = commands
|
||||
self.helper = helper
|
||||
self.hook_manager = hook_manager
|
||||
self.node_factory = node_factory
|
||||
|
||||
def render(self, key: Any, node: Node, json_path: str = "", parent_node: Any = None) -> Div:
|
||||
"""Main rendering method for any node"""
|
||||
must_expand = self.folding_manager.must_expand(node)
|
||||
|
||||
return Div(
|
||||
self._create_folding_icon(node, must_expand),
|
||||
Span(f'{key} : ') if key is not None else None,
|
||||
self._render_value(key, node, must_expand, json_path, parent_node),
|
||||
style=f"margin-left: {INDENT_SIZE}px;",
|
||||
id=getattr(node, "node_id", None)
|
||||
)
|
||||
|
||||
def _create_folding_icon(self, node: Node, must_expand: Optional[bool]) -> Optional[Span]:
|
||||
"""Creates folding/unfolding icon"""
|
||||
if must_expand is None:
|
||||
return None
|
||||
|
||||
return Span(
|
||||
icon_expanded if must_expand else icon_collapsed,
|
||||
cls="icon-16-inline mmt-jsonviewer-folding",
|
||||
style=f"margin-left: -{INDENT_SIZE}px;",
|
||||
**self.commands.fold(
|
||||
node.node_id,
|
||||
FoldingMode.COLLAPSE if must_expand else FoldingMode.EXPAND
|
||||
)
|
||||
)
|
||||
|
||||
def _render_value(self, key: Any,
|
||||
node: Node,
|
||||
must_expand: Optional[bool],
|
||||
json_path: str = "",
|
||||
parent_node: Any = None):
|
||||
"""Renders the value part of a node with new hook system"""
|
||||
if must_expand is False:
|
||||
return self._render_collapsed_indicator(node)
|
||||
|
||||
# Create hook context
|
||||
context = HookContext(
|
||||
key=key,
|
||||
node=node,
|
||||
helper=self.helper,
|
||||
jsonviewer=self.jsonviewer,
|
||||
json_path=json_path,
|
||||
parent_node=parent_node
|
||||
)
|
||||
|
||||
# Execute render hooks and check for results
|
||||
hook_results = self.hook_manager.execute_hooks(EventType.RENDER, context)
|
||||
|
||||
# If any hook returned a result, use the first one
|
||||
if hook_results:
|
||||
# Filter out None results
|
||||
valid_results = [result for result in hook_results if result is not None]
|
||||
if valid_results:
|
||||
return valid_results[0]
|
||||
|
||||
# No hooks matched or returned results, use default rendering
|
||||
if isinstance(node, DictNode):
|
||||
return self._render_dict_node(key, node)
|
||||
elif isinstance(node, ListNode):
|
||||
return self._render_list_node(key, node)
|
||||
else:
|
||||
return self._render_value_node(key, node)
|
||||
|
||||
def _render_collapsed_indicator(self, node: Node) -> Span:
|
||||
"""Renders collapsed indicator"""
|
||||
indicator = "[...]" if isinstance(node, ListNode) else "{...}"
|
||||
return Span(
|
||||
indicator,
|
||||
id=node.node_id,
|
||||
**self.commands.fold(node.node_id, FoldingMode.EXPAND)
|
||||
)
|
||||
|
||||
def _render_dict_node(self, key: Any, node: DictNode) -> Span:
|
||||
"""Renders dictionary node"""
|
||||
children_elements = []
|
||||
base_path = self.node_factory.get_node_path(node.node_id)
|
||||
|
||||
for child_key, child_node in node.children.items():
|
||||
child_path = f"{base_path}.{child_key}" if base_path else str(child_key)
|
||||
children_elements.append(self.render(child_key, child_node, child_path, node))
|
||||
|
||||
return Span(
|
||||
"{",
|
||||
*children_elements,
|
||||
Div("}"),
|
||||
id=node.node_id
|
||||
)
|
||||
|
||||
def _render_list_node(self, key: Any, node: ListNode) -> Span:
|
||||
"""Renders list node"""
|
||||
if self._should_render_list_as_grid(key, node):
|
||||
return self._render_list_as_grid(key, node)
|
||||
else:
|
||||
return self._render_list_as_array(key, node)
|
||||
|
||||
def _should_render_list_as_grid(self, key: Any, node: ListNode) -> bool:
|
||||
"""Determines if list should be rendered as grid"""
|
||||
if len(node.children) == 0:
|
||||
return False
|
||||
|
||||
sample_node = node.children[0]
|
||||
sample_value = sample_node.value
|
||||
|
||||
if sample_value is None:
|
||||
return False
|
||||
|
||||
type_ = type(sample_value)
|
||||
if type_ in (int, float, str, bool, list, dict, ValueNode):
|
||||
return False
|
||||
|
||||
# Check if hooks handle this type (simplified check)
|
||||
sample_context = HookContext(
|
||||
key=key,
|
||||
node=sample_node,
|
||||
helper=self.helper,
|
||||
jsonviewer=self.jsonviewer
|
||||
)
|
||||
hook_results = self.hook_manager.execute_hooks(EventType.RENDER, sample_context)
|
||||
if hook_results and any(result is not None for result in hook_results):
|
||||
return False
|
||||
|
||||
return all(type(item.value) == type_ for item in node.children)
|
||||
|
||||
def _render_list_as_grid(self, key: Any, node: ListNode) -> Span:
|
||||
"""Renders list as grid"""
|
||||
type_ = type(node.children[0].value)
|
||||
icon = icon_class
|
||||
str_value = type_.__name__.split(".")[-1]
|
||||
|
||||
data = [child.value.__dict__ for child in node.children]
|
||||
df = DataFrame(data)
|
||||
dg = DataGrid(self.session)
|
||||
dg.init_from_dataframe(df)
|
||||
|
||||
return Span(
|
||||
Span(
|
||||
Span(icon, cls="icon-16-inline mr-1"),
|
||||
Span(str_value),
|
||||
cls="mmt-jsonviewer-object"
|
||||
),
|
||||
dg,
|
||||
id=node.node_id
|
||||
)
|
||||
|
||||
def _render_list_as_array(self, key: Any, node: ListNode) -> Span:
|
||||
"""Renders list as array"""
|
||||
children_elements = []
|
||||
base_path = self.node_factory.get_node_path(node.node_id)
|
||||
|
||||
for index, child_node in enumerate(node.children):
|
||||
child_path = f"{base_path}[{index}]" if base_path else f"[{index}]"
|
||||
children_elements.append(self.render(index, child_node, child_path, node))
|
||||
|
||||
return Span(
|
||||
"[",
|
||||
*children_elements,
|
||||
Div("]"),
|
||||
)
|
||||
|
||||
def _render_value_node(self, key: Any, node: ValueNode) -> Span:
|
||||
"""Renders value node"""
|
||||
data_tooltip = None
|
||||
htmx_params = {}
|
||||
icon = None
|
||||
|
||||
if isinstance(node.value, bool):  # order matters: bool is a subclass of int in Python
|
||||
str_value = "true" if node.value else "false"
|
||||
data_class = "bool"
|
||||
elif isinstance(node.value, (int, float)):
|
||||
str_value = str(node.value)
|
||||
data_class = "number"
|
||||
elif node.value is None:
|
||||
str_value = "null"
|
||||
data_class = "null"
|
||||
elif self.helper.is_sha256(node.value):
|
||||
str_value = str(node.value)
|
||||
data_class = "digest"
|
||||
htmx_params = self.commands.open_digest(self.jsonviewer.user_id, node.value)
|
||||
elif node.hint == NODE_OBJECT:
|
||||
icon = icon_class
|
||||
str_value = node.value.split(".")[-1]
|
||||
data_class = "object"
|
||||
elif isinstance(node.value, DataFrame):
|
||||
return self._render_dataframe_value(node.value)
|
||||
else:
|
||||
str_value, data_tooltip = self._format_string_value(node.value)
|
||||
data_class = "string"
|
||||
|
||||
return self._create_value_span(str_value, data_class, icon, data_tooltip, htmx_params)
|
||||
|
||||
def _render_dataframe_value(self, dataframe: DataFrame) -> Any:
|
||||
"""Renders DataFrame value"""
|
||||
dg = DataGrid(self.session)
|
||||
dg.init_from_dataframe(dataframe)
|
||||
return dg
|
||||
|
||||
def _format_string_value(self, value: Any) -> tuple[str, Optional[str]]:
|
||||
"""Formats string value with tooltip if too long"""
|
||||
as_str = str(value)
|
||||
if len(as_str) > MAX_TEXT_LENGTH:
|
||||
return as_str[:MAX_TEXT_LENGTH] + "...", as_str
|
||||
else:
|
||||
return self.helper.add_quotes(as_str), None
|
||||
|
||||
def _create_value_span(self, str_value: str, data_class: str, icon: Any,
|
||||
data_tooltip: Optional[str], htmx_params: dict) -> Span:
|
||||
"""Creates the final Span element for a value"""
|
||||
css_class = f"mmt-jsonviewer-{data_class}"
|
||||
if data_tooltip:
|
||||
css_class += " mmt-tooltip"
|
||||
|
||||
if icon:
|
||||
return Span(
|
||||
Span(icon, cls="icon-16-inline mr-1"),
|
||||
Span(str_value, data_tooltip=data_tooltip, **htmx_params),
|
||||
cls=css_class
|
||||
)
|
||||
|
||||
return Span(str_value, cls=css_class, data_tooltip=data_tooltip, **htmx_params)
|
||||
|
||||
|
||||
class JsonViewer(BaseComponentMultipleInstance):
|
||||
"""Main JsonViewer component with new hook system"""
|
||||
|
||||
COMPONENT_INSTANCE_ID = "Jsonviewer"
|
||||
|
||||
def __init__(self, session, _id, data=None, hooks: list[Hook] = None, key=None, boundaries=None):
|
||||
super().__init__(session, _id)
|
||||
self._key = key
|
||||
self.data = data
|
||||
self._boundaries = boundaries if boundaries else {"height": "600"}
|
||||
self._commands = JsonViewerCommands(self)
|
||||
|
||||
# Initialize hook system (transparent to user)
|
||||
self._hook_manager = HookManager()
|
||||
if hooks:
|
||||
self._hook_manager.add_hooks(hooks)
|
||||
|
||||
# Initialize helper components
|
||||
self._helper = JsonViewerHelper()
|
||||
self._id_generator = NodeIdGenerator(_id)
|
||||
self._folding_manager = FoldingManager()
|
||||
self._node_factory = NodeFactory(self._id_generator, self._folding_manager)
|
||||
|
||||
# Initialize renderer with hook manager
|
||||
self._node_renderer = NodeRenderer(
|
||||
session, self, self._folding_manager,
|
||||
self._commands, self._helper, self._hook_manager, self._node_factory
|
||||
)
|
||||
|
||||
# Create the initial node tree
|
||||
self.node = self._node_factory.create_node(None, data)
|
||||
|
||||
@property
|
||||
def user_id(self) -> str:
|
||||
"""Gets user_id from session or returns default"""
|
||||
return getattr(self, '_user_id', getattr(self._session, 'user_id', 'default_user'))
|
||||
|
||||
def set_data(self, data):
|
||||
"""Updates the data and recreates the node tree"""
|
||||
self.data = data
|
||||
self._id_generator.reset()
|
||||
self._node_factory.clear()
|
||||
self.node = self._node_factory.create_node(None, data)
|
||||
|
||||
def add_hook(self, hook: Hook):
|
||||
"""Adds a single hook to the viewer"""
|
||||
self._hook_manager.add_hook(hook)
|
||||
|
||||
def add_hooks(self, hooks: list[Hook]):
|
||||
"""Adds multiple hooks to the viewer"""
|
||||
self._hook_manager.add_hooks(hooks)
|
||||
|
||||
def clear_hooks(self):
|
||||
"""Removes all hooks from the viewer"""
|
||||
self._hook_manager.clear_hooks()
|
||||
|
||||
def set_node_folding(self, node_id: str, folding: str):
|
||||
"""Sets folding state for a specific node"""
|
||||
self._folding_manager.set_node_folding(node_id, folding)
|
||||
|
||||
def render_node(self, node_id: str):
|
||||
"""Renders a specific node by ID"""
|
||||
key, node = self._node_factory.get_node_by_id(node_id)
|
||||
json_path = self._node_factory.get_node_path(node_id)
|
||||
parent_node = self._node_factory.get_node_parent(node_id)
|
||||
return self._node_renderer.render(key, node, json_path, parent_node)
|
||||
|
||||
def set_folding_mode(self, folding_mode: str):
|
||||
"""Sets global folding mode"""
|
||||
self._folding_manager.set_folding_mode(folding_mode)
|
||||
|
||||
def get_folding_mode(self) -> str:
|
||||
"""Gets current folding mode"""
|
||||
return self._folding_manager.get_folding_mode()
|
||||
|
||||
def open_digest(self, user_id: str, digest: str):
|
||||
"""Opens digest - preserves original method"""
|
||||
return self._owner.db_engine_headers(user_id, digest)
|
||||
|
||||
def __ft__(self):
|
||||
"""FastHTML rendering method"""
|
||||
if self.node is None:
|
||||
return Div("No data to display", cls="mmt-jsonviewer", id=f"{self._id}")
|
||||
|
||||
return Div(
|
||||
Div(
|
||||
self._node_renderer.render(None, self.node, "", None),
|
||||
id=f"{self._id}-root",
|
||||
style="margin-left: 0px;"
|
||||
),
|
||||
cls="mmt-jsonviewer",
|
||||
id=f"{self._id}",
|
||||
**apply_boundaries(self._boundaries)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Equality comparison"""
|
||||
if type(other) is type(self):
|
||||
return self._key is not None and self._key == other._key
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
"""Hash method"""
|
||||
return hash(self._key) if self._key is not None else super().__hash__()
|
||||
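# Illustrative usage sketch (hypothetical: `session` is assumed to be the framework session
# and the data is a placeholder; the component is normally created by the owning tab/page):
def _example_jsonviewer_usage(session):
    viewer = JsonViewer(session, "jv-demo", data={"user": {"name": "Ada"}, "tags": ["x", "y"]})
    viewer.set_folding_mode(FoldingMode.EXPAND)  # expand the whole tree
    return viewer  # __ft__() is called by FastHTML when the component is rendered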
0
src/components/jsonviewer/components/__init__.py
Normal file
10
src/components/jsonviewer/constants.py
Normal file
@@ -0,0 +1,10 @@
|
||||
ROUTE_ROOT = "/jsonviewer"
|
||||
|
||||
INDENT_SIZE = 20
|
||||
MAX_TEXT_LENGTH = 50
|
||||
|
||||
NODE_OBJECT = "Object"
|
||||
NODES_KEYS_TO_NOT_EXPAND = ["Dataframe", "__parent__"]
|
||||
|
||||
class Routes:
|
||||
Fold = "/fold"
|
||||
386
src/components/jsonviewer/hooks.py
Normal file
@@ -0,0 +1,386 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from enum import Enum
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
|
||||
class EventType(Enum):
|
||||
RENDER = "render"
|
||||
CLICK = "click"
|
||||
HOVER = "hover"
|
||||
FOCUS = "focus"
|
||||
|
||||
|
||||
class DefaultEditableTypes:
"""Sentinel meaning 'use the default editable types' (str, int, float, bool) in WhenEditable."""
|
||||
|
||||
|
||||
class HookContext:
|
||||
"""Enhanced context object passed to hook executors"""
|
||||
|
||||
def __init__(self, key: Any, node: Any, helper: Any, jsonviewer: Any,
|
||||
json_path: str = None, parent_node: Any = None):
|
||||
self.key = key
|
||||
self.node = node
|
||||
self.helper = helper
|
||||
self.jsonviewer = jsonviewer
|
||||
self.json_path = json_path or ""
|
||||
self.parent_node = parent_node
|
||||
self.metadata = {}
|
||||
|
||||
def get_node_type(self) -> str:
|
||||
"""Returns string representation of node type"""
|
||||
if hasattr(self.node, '__class__'):
|
||||
return self.node.__class__.__name__
|
||||
return type(self.node.value).__name__ if hasattr(self.node, 'value') else "unknown"
|
||||
|
||||
def get_value(self) -> Any:
|
||||
"""Gets the actual value from the node"""
|
||||
return getattr(self.node, 'value', self.node)
|
||||
|
||||
def is_leaf_node(self) -> bool:
|
||||
"""Checks if this is a leaf node (no children)"""
|
||||
return not hasattr(self.node, 'children') or not self.node.children
|
||||
|
||||
|
||||
class Condition(ABC):
|
||||
"""Base class for all conditions"""
|
||||
|
||||
@abstractmethod
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
pass
|
||||
|
||||
|
||||
class WhenLongText(Condition):
|
||||
"""Condition: text length > threshold"""
|
||||
|
||||
def __init__(self, threshold: int = 100):
|
||||
self.threshold = threshold
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
value = context.get_value()
|
||||
return isinstance(value, str) and len(value) > self.threshold
|
||||
|
||||
|
||||
class WhenEditable(Condition):
|
||||
"""Condition: node is editable (configurable logic)"""
|
||||
|
||||
def __init__(self, editable_paths: list[str] = None, editable_types: list[type] = DefaultEditableTypes):
|
||||
self.editable_paths = set(editable_paths or [])
|
||||
if editable_types is None:
|
||||
self.editable_types = set()
|
||||
else:
|
||||
self.editable_types = set([str, int, float, bool] if editable_types is DefaultEditableTypes else editable_types)
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
# Check if path is in editable paths
|
||||
if self.editable_paths and context.json_path in self.editable_paths:
|
||||
return True
|
||||
|
||||
# Check if type is editable
|
||||
value = context.get_value()
|
||||
return type(value) in self.editable_types and context.is_leaf_node()
|
||||
|
||||
|
||||
class WhenType(Condition):
|
||||
"""Condition: node value is of specific type"""
|
||||
|
||||
def __init__(self, target_type: type):
|
||||
self.target_type = target_type
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
value = context.get_value()
|
||||
return isinstance(value, self.target_type)
|
||||
|
||||
|
||||
class WhenKey(Condition):
|
||||
"""Condition: node key matches pattern"""
|
||||
|
||||
def __init__(self, key_pattern: Any):
|
||||
self.key_pattern = key_pattern
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
if callable(self.key_pattern):
|
||||
return self.key_pattern(context.key)
|
||||
return context.key == self.key_pattern
|
||||
|
||||
|
||||
class WhenPath(Condition):
|
||||
"""Condition: JSON path matches pattern"""
|
||||
|
||||
def __init__(self, path_pattern: str):
|
||||
self.path_pattern = path_pattern
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
import re
|
||||
return bool(re.match(self.path_pattern, context.json_path))
|
||||
|
||||
|
||||
class WhenValue(Condition):
|
||||
"""Condition: node value matches specific value or predicate"""
|
||||
|
||||
def __init__(self, target_value: Any = None, predicate: Callable[[Any], bool] = None):
|
||||
if target_value is not None and predicate is not None:
|
||||
raise ValueError("Cannot specify both target_value and predicate")
|
||||
if target_value is None and predicate is None:
|
||||
raise ValueError("Must specify either target_value or predicate")
|
||||
|
||||
self.target_value = target_value
|
||||
self.predicate = predicate
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
value = context.get_value()
|
||||
|
||||
if self.predicate:
|
||||
return self.predicate(value)
|
||||
else:
|
||||
return value == self.target_value
|
||||
|
||||
|
||||
class CompositeCondition(Condition):
|
||||
"""Allows combining conditions with AND/OR logic"""
|
||||
|
||||
def __init__(self, conditions: list[Condition], operator: str = "AND"):
|
||||
self.conditions = conditions
|
||||
self.operator = operator.upper()
|
||||
|
||||
def evaluate(self, context: HookContext) -> bool:
|
||||
if not self.conditions:
|
||||
return True
|
||||
|
||||
results = [condition.evaluate(context) for condition in self.conditions]
|
||||
|
||||
if self.operator == "AND":
|
||||
return all(results)
|
||||
elif self.operator == "OR":
|
||||
return any(results)
|
||||
else:
|
||||
raise ValueError(f"Unknown operator: {self.operator}")
|
||||
|
||||
|
||||
class Hook:
|
||||
"""Represents a complete hook with event, conditions, and executor"""
|
||||
|
||||
def __init__(self, event_type: EventType, conditions: list[Condition],
|
||||
executor: Callable, requires_modification: bool = False):
|
||||
self.event_type = event_type
|
||||
self.conditions = conditions
|
||||
self.executor = executor
|
||||
self.requires_modification = requires_modification
|
||||
|
||||
def matches(self, event_type: EventType, context: HookContext) -> bool:
|
||||
"""Checks if this hook should be executed for given event and context"""
|
||||
if self.event_type != event_type:
|
||||
return False
|
||||
|
||||
return all(condition.evaluate(context) for condition in self.conditions)
|
||||
|
||||
def execute(self, context: HookContext) -> Any:
|
||||
"""Executes the hook with given context"""
|
||||
return self.executor(context)
|
||||
|
||||
|
||||
class HookBuilder:
|
||||
"""Builder class for creating hooks with fluent interface"""
|
||||
|
||||
def __init__(self):
|
||||
self._event_type: Optional[EventType] = None
|
||||
self._conditions: list[Condition] = []
|
||||
self._executor: Optional[Callable] = None
|
||||
self._requires_modification: bool = False
|
||||
|
||||
# Event specification methods
|
||||
def on_render(self):
|
||||
"""Hook will be triggered on render event"""
|
||||
self._event_type = EventType.RENDER
|
||||
return self
|
||||
|
||||
def on_click(self):
|
||||
"""Hook will be triggered on click event"""
|
||||
self._event_type = EventType.CLICK
|
||||
return self
|
||||
|
||||
def on_hover(self):
|
||||
"""Hook will be triggered on hover event"""
|
||||
self._event_type = EventType.HOVER
|
||||
return self
|
||||
|
||||
def on_focus(self):
|
||||
"""Hook will be triggered on focus event"""
|
||||
self._event_type = EventType.FOCUS
|
||||
return self
|
||||
|
||||
# Condition methods
|
||||
def when_long_text(self, threshold: int = 100):
|
||||
"""Add condition: text length > threshold"""
|
||||
self._conditions.append(WhenLongText(threshold))
|
||||
return self
|
||||
|
||||
def when_editable(self, editable_paths: list[str] = None, editable_types: list[type] = None):
|
||||
"""Add condition: node is editable"""
|
||||
self._conditions.append(WhenEditable(editable_paths, editable_types))
|
||||
return self
|
||||
|
||||
def when_type(self, target_type: type):
|
||||
"""Add condition: node value is of specific type"""
|
||||
self._conditions.append(WhenType(target_type))
|
||||
return self
|
||||
|
||||
def when_key(self, key_pattern: Any):
|
||||
"""Add condition: node key matches pattern"""
|
||||
self._conditions.append(WhenKey(key_pattern))
|
||||
return self
|
||||
|
||||
def when_path(self, path_pattern: str):
|
||||
"""Add condition: JSON path matches pattern"""
|
||||
self._conditions.append(WhenPath(path_pattern))
|
||||
return self
|
||||
|
||||
def when_value(self, target_value: Any = None, predicate: Callable[[Any], bool] = None):
|
||||
"""Add condition: node value matches specific value or predicate"""
|
||||
self._conditions.append(WhenValue(target_value, predicate))
|
||||
return self
|
||||
|
||||
def when_custom(self, condition):
|
||||
"""Add custom condition (supports both Condition instances and predicate functions)."""
|
||||
if callable(condition) and not isinstance(condition, Condition):
|
||||
# Wrap the predicate function in a Condition class
|
||||
class PredicateCondition(Condition):
|
||||
def __init__(self, predicate):
|
||||
self.predicate = predicate
|
||||
|
||||
def evaluate(self, context):
|
||||
return self.predicate(context)
|
||||
|
||||
condition = PredicateCondition(condition) # Pass the function to the wrapper
|
||||
|
||||
elif not isinstance(condition, Condition):
|
||||
raise ValueError("when_custom expects a Condition instance or a callable predicate.")
|
||||
|
||||
self._conditions.append(condition)
|
||||
return self
|
||||
|
||||
def when_all(self, conditions: list[Condition]):
|
||||
"""Add composite condition with AND logic"""
|
||||
self._conditions.append(CompositeCondition(conditions, "AND"))
|
||||
return self
|
||||
|
||||
def when_any(self, conditions: list[Condition]):
|
||||
"""Add composite condition with OR logic"""
|
||||
self._conditions.append(CompositeCondition(conditions, "OR"))
|
||||
return self
|
||||
|
||||
# Modification flag
|
||||
def requires_modification(self):
|
||||
"""Indicates this hook will modify the state"""
|
||||
self._requires_modification = True
|
||||
return self
|
||||
|
||||
# Execution
|
||||
def execute(self, executor: Callable) -> Hook:
|
||||
"""Sets the executor function and builds the hook"""
|
||||
if not self._event_type:
|
||||
raise ValueError("Event type must be specified (use on_render(), on_click(), etc.)")
|
||||
|
||||
if not executor:
|
||||
raise ValueError("Executor function must be provided")
|
||||
|
||||
self._executor = executor
|
||||
|
||||
return Hook(
|
||||
event_type=self._event_type,
|
||||
conditions=self._conditions,
|
||||
executor=self._executor,
|
||||
requires_modification=self._requires_modification
|
||||
)
|
||||
|
||||
|
||||
class HookManager:
|
||||
"""Manages and executes hooks for JsonViewer"""
|
||||
|
||||
def __init__(self):
|
||||
self.hooks: list[Hook] = []
|
||||
|
||||
def add_hook(self, hook: Hook):
|
||||
"""Adds a hook to the manager"""
|
||||
self.hooks.append(hook)
|
||||
|
||||
def add_hooks(self, hooks: list[Hook]):
|
||||
"""Adds multiple hooks to the manager"""
|
||||
self.hooks.extend(hooks)
|
||||
|
||||
def find_matching_hooks(self, event_type: EventType, context: HookContext) -> list[Hook]:
|
||||
"""Finds all hooks that match the event and context"""
|
||||
return [hook for hook in self.hooks if hook.matches(event_type, context)]
|
||||
|
||||
def execute_hooks(self, event_type: EventType, context: HookContext) -> list[Any]:
|
||||
"""Executes all matching hooks and returns results"""
|
||||
matching_hooks = self.find_matching_hooks(event_type, context)
|
||||
results = []
|
||||
|
||||
for hook in matching_hooks:
|
||||
try:
|
||||
result = hook.execute(context)
|
||||
results.append(result)
|
||||
|
||||
# If this hook requires modification, we might want to stop here
|
||||
# or handle the modification differently
|
||||
if hook.requires_modification:
|
||||
# Could add callback to parent component here
|
||||
pass
|
||||
|
||||
except Exception as e:
|
||||
# Log error but continue with other hooks
|
||||
print(f"Hook execution error: {e}")
|
||||
continue
|
||||
|
||||
return results
|
||||
|
||||
def clear_hooks(self):
|
||||
"""Removes all hooks"""
|
||||
self.hooks.clear()
|
||||
|
||||
|
||||
# Example usage and factory functions
|
||||
def create_long_text_viewer_hook(threshold: int = 100) -> Hook:
|
||||
"""Factory function for common long text viewer hook"""
|
||||
|
||||
def text_viewer_component(context: HookContext):
|
||||
from fasthtml.components import Div, Span
|
||||
|
||||
value = context.get_value()
|
||||
truncated = value[:threshold] + "..."
|
||||
|
||||
return Div(
|
||||
Span(truncated, cls="text-truncated"),
|
||||
Span("Click to expand", cls="expand-hint"),
|
||||
cls="long-text-viewer"
|
||||
)
|
||||
|
||||
return (HookBuilder()
|
||||
.on_render()
|
||||
.when_long_text(threshold)
|
||||
.execute(text_viewer_component))
|
||||
|
||||
|
||||
def create_inline_editor_hook(editable_paths: list[str] = None) -> Hook:
|
||||
"""Factory function for common inline editor hook"""
|
||||
|
||||
def inline_editor_component(context: HookContext):
|
||||
from fasthtml.components import Input, Div
|
||||
|
||||
value = context.get_value()
|
||||
|
||||
return Div(
|
||||
Input(
|
||||
value=str(value),
|
||||
type="text" if isinstance(value, str) else "number",
|
||||
cls="inline-editor"
|
||||
),
|
||||
cls="editable-field"
|
||||
)
|
||||
|
||||
return (HookBuilder()
|
||||
.on_click()
|
||||
.when_editable(editable_paths)
|
||||
.requires_modification()
|
||||
.execute(inline_editor_component))
|
||||
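# Illustrative end-to-end sketch (hypothetical executor and node): a hook is conditions plus
# an executor; the manager evaluates them against a HookContext and collects the results.
def _example_hook_roundtrip():
    class _FakeNode:
        value = "ERROR: boom"

    def highlight_error(context: HookContext):
        return f"!! {context.get_value()} !!"  # placeholder executor; would return a FT element in practice

    hook = (HookBuilder()
            .on_render()
            .when_type(str)
            .when_value(predicate=lambda v: v.startswith("ERROR"))
            .execute(highlight_error))

    manager = HookManager()
    manager.add_hook(hook)

    context = HookContext(key="status", node=_FakeNode(), helper=None, jsonviewer=None)
    assert manager.execute_hooks(EventType.RENDER, context) == ["!! ERROR: boom !!"]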
@@ -20,7 +20,7 @@ def get(session):
|
||||
|
||||
|
||||
@rt(Routes.AddRepository)
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries:str):
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository: str, table: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddRepository} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id) # Repository
|
||||
@@ -34,8 +34,9 @@ def get(session, _id: str, repository_name: str):
|
||||
|
||||
|
||||
@rt(Routes.AddTable)
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries:str):
|
||||
logger.debug(f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
|
||||
def post(session, _id: str, tab_id: str, form_id: str, repository_name: str, table_name: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.AddTable} with args {debug_session(session)}, {_id=}, {tab_id=}, {form_id=}, {repository_name=}, {table_name=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.add_new_table(tab_id, form_id, repository_name, table_name, json.loads(tab_boundaries))
|
||||
|
||||
@@ -48,7 +49,8 @@ def put(session, _id: str, repository: str):
|
||||
|
||||
|
||||
@rt(Routes.ShowTable)
|
||||
def get(session, _id: str, repository: str, table: str, tab_boundaries:str):
|
||||
logger.debug(f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
def get(session, _id: str, repository: str, table: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.ShowTable} with args {debug_session(session)}, {_id=}, {repository=}, {table=}, {tab_boundaries=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.show_table(repository, table, json.loads(tab_boundaries))
|
||||
|
||||
@@ -152,6 +152,14 @@ class MyTabs(BaseComponent):
|
||||
def get_tab_content_by_key(self, key):
|
||||
return self.tabs_by_key[key].content if key in self.tabs_by_key else None
|
||||
|
||||
def show_tab(self, tab_key, updated_content=None):
|
||||
if updated_content:
|
||||
tab_id = self._get_tab_id_from_tab_key(tab_key)
|
||||
self.set_tab_content(tab_id, updated_content)
|
||||
|
||||
self.select_tab_by_key(tab_key)
|
||||
return self.refresh()
|
||||
|
||||
def refresh(self):
|
||||
return self.render(oob=True)
|
||||
|
||||
@@ -188,6 +196,13 @@ class MyTabs(BaseComponent):
|
||||
active_tab = next(filter(lambda t: t.active, self.tabs), None)
|
||||
return active_tab.content if active_tab else None
|
||||
|
||||
def get_active_tab_key(self):
|
||||
active_tab = next(filter(lambda t: t.active, self.tabs), None)
|
||||
return active_tab.key if active_tab else None
|
||||
|
||||
def _get_tab_id_from_tab_key(self, tab_key):
|
||||
return self.tabs_by_key[tab_key].id if tab_key in self.tabs_by_key else None
|
||||
|
||||
@staticmethod
|
||||
def create_component_id(session):
|
||||
prefix = f"{MY_TABS_INSTANCE_ID}{session['user_id']}"
|
||||
|
||||
@@ -1,30 +1,27 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
|
||||
from fastcore.xml import FT
|
||||
from fasthtml.components import *
|
||||
|
||||
from components.BaseComponent import BaseComponentSingleton
|
||||
from components.undo_redo.assets.icons import icon_redo, icon_undo
|
||||
from components.undo_redo.commands import UndoRedoCommandManager
|
||||
from components.undo_redo.constants import UNDO_REDO_INSTANCE_ID
|
||||
from components.undo_redo.constants import UNDO_REDO_INSTANCE_ID, UndoRedoAttrs
|
||||
from components_helpers import mk_icon, mk_tooltip
|
||||
from core.settings_management import NoDefault
|
||||
|
||||
logger = logging.getLogger("UndoRedoApp")
|
||||
|
||||
|
||||
class CommandHistory(ABC):
|
||||
def __init__(self, name, desc, owner):
|
||||
self.name = name
|
||||
self.desc = desc
|
||||
self.owner = owner
|
||||
|
||||
@abstractmethod
|
||||
def undo(self):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def redo(self):
|
||||
pass
|
||||
@dataclass
|
||||
class CommandHistory:
|
||||
attrs: UndoRedoAttrs
|
||||
tab_key: str | None
|
||||
digest: str | None  # digest to remember
entry: str  # settings entry the digest belongs to
key: str  # key within the entry
path: str  # path within the key if only one subitem needs to be updated
|
||||
|
||||
|
||||
class UndoRedo(BaseComponentSingleton):
|
||||
@@ -35,35 +32,91 @@ class UndoRedo(BaseComponentSingleton):
|
||||
self.index = -1
|
||||
self.history = []
|
||||
self._commands = UndoRedoCommandManager(self)
|
||||
self._db_engine = settings_manager.get_db_engine()
|
||||
|
||||
def push(self, command: CommandHistory):
|
||||
self.history = self.history[:self.index + 1]
|
||||
def snapshot(self, undo_redo_attrs: UndoRedoAttrs, entry, key, path=None):
|
||||
digest = self._settings_manager.get_digest(self._session, entry) # get the current digest (the last one)
|
||||
active_tab_key = self.tabs_manager.get_active_tab_key()
|
||||
|
||||
# init the history if this is the first call
|
||||
if len(self.history) == 0:
|
||||
digest_history = self._settings_manager.history(self._session, entry, digest, 2)
|
||||
command = CommandHistory(undo_redo_attrs,
|
||||
active_tab_key,
|
||||
digest_history[1] if len(digest_history) > 1 else None,
|
||||
entry,
|
||||
key,
|
||||
path)
|
||||
self.history.append(command)
|
||||
self.index = 0
|
||||
|
||||
command = CommandHistory(undo_redo_attrs, active_tab_key, digest, entry, key, path)
|
||||
|
||||
self.history = self.history[:self.index + 1]  # drop any redo tail beyond the current index
|
||||
self.history.append(command)
|
||||
self.index += 1
|
||||
self.index = len(self.history) - 1
|
||||
|
||||
def undo(self):
|
||||
logger.debug(f"Undo command")
|
||||
if self.index < 0 :
|
||||
if self.index < 1:
|
||||
logger.debug(f" No command to undo.")
|
||||
return self
|
||||
|
||||
command = self.history[self.index]
|
||||
logger.debug(f" Undoing command {command.name} ({command.desc})")
|
||||
res = command.undo()
|
||||
current = self.history[self.index]
|
||||
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
|
||||
|
||||
previous = self.history[self.index - 1]
|
||||
previous_state = self._settings_manager.load(self._session, None, digest=previous.digest)
|
||||
|
||||
# reapply the state
|
||||
if previous_state is not NoDefault:
|
||||
current_state[current.key] = previous_state[current.key]
|
||||
else:
|
||||
del current_state[current.key]
|
||||
self._settings_manager.save(self._session, current.entry, current_state)
|
||||
|
||||
self.index -= 1
|
||||
return self, res
|
||||
|
||||
if current.attrs.on_undo is not None:
|
||||
ret = current.attrs.on_undo()
|
||||
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
|
||||
ret = self.tabs_manager.show_tab(current.tab_key)
|
||||
elif isinstance(ret, FT) and 'id' in ret.attrs:
|
||||
ret.attrs["hx-swap-oob"] = "true"
|
||||
return self, ret
|
||||
else:
|
||||
return self
|
||||
|
||||
def redo(self):
|
||||
logger.debug("Redo command")
|
||||
if self.index == len(self.history) - 1:
|
||||
logger.debug(f" No command to redo.")
|
||||
logger.debug(f"Redo command")
|
||||
if self.index >= len(self.history) - 1:
|
||||
logger.debug(f" No command to undo.")
|
||||
return self
|
||||
|
||||
current = self.history[self.index]
|
||||
current_state = self._settings_manager.load(self._session, None, digest=current.digest)
|
||||
|
||||
next_ = self.history[self.index + 1]
|
||||
next_state = self._settings_manager.load(self._session, None, digest=next_.digest)
|
||||
|
||||
# reapply the state
|
||||
if current_state is not NoDefault:
|
||||
current_state[current.key] = next_state[current.key]
|
||||
else:
|
||||
current_state = {current.key: next_state[current.key]}
|
||||
self._settings_manager.save(self._session, current.entry, current_state)
|
||||
|
||||
self.index += 1
|
||||
command = self.history[self.index]
|
||||
logger.debug(f" Redoing command {command.name} ({command.desc})")
|
||||
res = command.redo()
|
||||
return self, res
|
||||
|
||||
if current.attrs.on_redo is not None:
|
||||
ret = current.attrs.on_redo()
|
||||
if current.attrs.update_tab and current.tab_key is not None and current.tab_key != self.tabs_manager.get_active_tab_key():
|
||||
ret = self.tabs_manager.show_tab(current.tab_key)
|
||||
elif isinstance(ret, FT) and 'id' in ret.attrs:
|
||||
ret.attrs["hx-swap-oob"] = "true"
|
||||
return self, ret
|
||||
else:
|
||||
return self
|
||||
|
||||
def refresh(self):
|
||||
return self.__ft__(oob=True)
|
||||
@@ -83,7 +136,7 @@ class UndoRedo(BaseComponentSingleton):
|
||||
return mk_tooltip(mk_icon(icon_undo,
|
||||
size=24,
|
||||
**self._commands.undo()),
|
||||
f"Undo '{command.name}'.")
|
||||
f"Undo '{command.attrs.name}'.")
|
||||
else:
|
||||
return mk_tooltip(mk_icon(icon_undo,
|
||||
size=24,
|
||||
@@ -93,11 +146,11 @@ class UndoRedo(BaseComponentSingleton):
|
||||
|
||||
def _mk_redo(self):
|
||||
if self._can_redo():
|
||||
command = self.history[self.index]
|
||||
command = self.history[self.index + 1]
|
||||
return mk_tooltip(mk_icon(icon_redo,
|
||||
size=24,
|
||||
**self._commands.redo()),
|
||||
f"Redo '{command.name}'.")
|
||||
f"Redo '{command.attrs.name}'.")
|
||||
else:
|
||||
return mk_tooltip(mk_icon(icon_redo,
|
||||
size=24,
|
||||
@@ -106,7 +159,7 @@ class UndoRedo(BaseComponentSingleton):
|
||||
"Nothing to redo.")
|
||||
|
||||
def _can_undo(self):
|
||||
return self.index >= 0
|
||||
return self.index >= 1
|
||||
|
||||
def _can_redo(self):
|
||||
return self.index < len(self.history) - 1
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Callable
|
||||
|
||||
UNDO_REDO_INSTANCE_ID = "__UndoRedo__"
|
||||
|
||||
ROUTE_ROOT = "/undo"
|
||||
@@ -5,4 +8,17 @@ ROUTE_ROOT = "/undo"
|
||||
|
||||
class Routes:
|
||||
Undo = "/undo"
|
||||
Redo = "/redo"
|
||||
Redo = "/redo"
|
||||
|
||||
|
||||
@dataclass
|
||||
class UndoRedoAttrs:
|
||||
name: str
|
||||
desc: str = None
|
||||
update_tab: bool = True
|
||||
on_undo: Callable = None
|
||||
on_redo: Callable = None
|
||||
|
||||
def __post_init__(self):
|
||||
if self.on_redo is None:
|
||||
self.on_redo = self.on_undo
|
||||
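# Illustrative sketch (hypothetical callback): when only `on_undo` is provided, __post_init__
# reuses it for redo, so symmetric operations only need a single callback.
def _example_undo_redo_attrs():
    def restore():
        return "restored"  # placeholder callback

    attrs = UndoRedoAttrs(name="Rename column", on_undo=restore)
    assert attrs.on_redo is restore  # defaulted by __post_init__
    assert attrs.update_tab is True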
@@ -4,12 +4,21 @@
|
||||
|
||||
using `_id={WORKFLOW_DESIGNER_INSTANCE_ID}{session['user_id']}{get_unique_id()}`
|
||||
|
||||
| Name | value |
|
||||
|-----------------|--------------------|
|
||||
| Canvas | `c_{self._id}` |
|
||||
| Designer | `d_{self._id}` |
|
||||
| Error Message | `err_{self._id}` |
|
||||
| Properties | `p_{self._id}` |
|
||||
| Spliter | `s_{self._id}` |
|
||||
| Top element | `t_{self._id}` |
|
||||
| Name | value |
|
||||
|----------------------------------|--------------------------------|
|
||||
| Canvas | `c_{self._id}` |
|
||||
| Designer | `d_{self._id}` |
|
||||
| Error Message | `err_{self._id}` |
|
||||
| Properties | `p_{self._id}` |
|
||||
| Properties Input Section | `pi_{self._id}` |
|
||||
| Properties Output Section | `po_{self._id}` |
|
||||
| Properties Properties Section | `pp_{self._id}` |
|
||||
| Properties Properties drag top | `ppt_{self._id}` |
|
||||
| Properties Properties drag left | `ppl_{self._id}` |
|
||||
| Properties Properties drag right | `ppr_{self._id}` |
|
||||
| Properties Properties content | `ppc_{self._id}` |
|
||||
| Spliter | `s_{self._id}` |
|
||||
| Top element | `t_{self._id}` |
|
||||
| Form for properties | `f_{self._id}_{component_id}` |
|
||||
| Form for output properties | `fo_{self._id}_{component_id}` |
|
||||
|
||||
|
||||
@@ -82,6 +82,14 @@ def post(session, _id: str, designer_height: int):
|
||||
return instance.set_designer_height(designer_height)
|
||||
|
||||
|
||||
@rt(Routes.UpdatePropertiesLayout)
|
||||
def post(session, _id: str, input_width: int, properties_width: int, output_width: int):
|
||||
logger.debug(
|
||||
f"Entering {Routes.UpdatePropertiesLayout} with args {debug_session(session)}, {_id=}, {input_width=}, {properties_width=}, {output_width=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.update_properties_layout(input_width, properties_width, output_width)
|
||||
|
||||
|
||||
@rt(Routes.SelectComponent)
|
||||
def post(session, _id: str, component_id: str):
|
||||
logger.debug(
|
||||
@@ -129,14 +137,20 @@ def post(session, _id: str, component_id: str, event_name: str, details: dict):
|
||||
|
||||
@rt(Routes.PlayWorkflow)
|
||||
def post(session, _id: str, tab_boundaries: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
logger.debug(f"Entering {Routes.PlayWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.play_workflow(json.loads(tab_boundaries))
|
||||
|
||||
|
||||
@rt(Routes.StopWorkflow)
|
||||
def post(session, _id: str):
|
||||
logger.debug(
|
||||
f"Entering {Routes.StopWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
logger.debug(f"Entering {Routes.StopWorkflow} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.stop_workflow()
|
||||
return instance.stop_workflow()
|
||||
|
||||
|
||||
@rt(Routes.Refresh)
|
||||
def post(session, _id: str):
|
||||
logger.debug(f"Entering {Routes.Refresh} with args {debug_session(session)}, {_id=}")
|
||||
instance = InstanceManager.get(session, _id)
|
||||
return instance.refresh()
|
||||
|
||||
@@ -47,8 +47,105 @@
|
||||
|
||||
.wkf-properties {
|
||||
box-sizing: border-box;
|
||||
position: relative;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: var(--color-base-100); /* bg-base-100 */
|
||||
}
|
||||
|
||||
.wkf-properties-input, .wkf-properties-output {
|
||||
display: inline-block;
|
||||
vertical-align: top;
|
||||
padding: 10px;
|
||||
box-sizing: border-box;
|
||||
font-family: Arial, sans-serif;
|
||||
background-color: var(--color-base-100); /* bg-base-100 */
|
||||
overflow: auto;
|
||||
|
||||
}
|
||||
|
||||
.wkf-properties-input {
|
||||
border-width: 1px;
|
||||
border-top-left-radius: 0.5rem; /* rounded on left side */
|
||||
border-bottom-left-radius: 0.5rem;
|
||||
border-top-right-radius: 0; /* not rounded on right side */
|
||||
border-bottom-right-radius: 0;
|
||||
}
|
||||
|
||||
.wkf-properties-output {
|
||||
border-width: 1px;
|
||||
border-top-right-radius: 0.5rem; /* rounded on right side */
|
||||
border-bottom-right-radius: 0.5rem;
|
||||
border-top-left-radius: 0; /* not rounded on left side */
|
||||
border-bottom-left-radius: 0;
|
||||
}
|
||||
|
||||
.wkf-properties-properties {
|
||||
vertical-align: top;
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-left {
|
||||
position: absolute;
|
||||
left: 0;
|
||||
top: 0;
|
||||
width: 5px;
|
||||
height: 100%;
|
||||
cursor: ew-resize;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-right {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
top: 0;
|
||||
width: 5px;
|
||||
height: 100%;
|
||||
cursor: ew-resize;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
.wkf-properties-top {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
cursor: move;
|
||||
padding: 4px;
|
||||
}
|
||||
|
||||
.wkf-properties-handle-top {
|
||||
background-image: radial-gradient(var(--color-splitter) 40%, transparent 0);
|
||||
background-repeat: repeat;
|
||||
background-size: 4px 4px;
|
||||
cursor: move;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
height: 8px;
|
||||
width: 20px;
|
||||
position: relative;
|
||||
top: 1px;
|
||||
}
|
||||
|
||||
.wkf-properties-content {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%; /* or inherit from a fixed-height parent */
|
||||
}
|
||||
|
||||
.wkf-properties-content-header {
|
||||
flex-shrink: 0; /* optional: prevent it from shrinking */
|
||||
}
|
||||
|
||||
.wkf-properties-content-form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-grow: 1;
|
||||
overflow: hidden; /* prevent double scrollbars if needed */
|
||||
}
|
||||
|
||||
|
||||
.wkf-canvas {
|
||||
position: relative;
|
||||
box-sizing: border-box;
|
||||
@@ -193,5 +290,3 @@
|
||||
.wkf-connection-path-arrowhead-selected {
|
||||
fill:#ef4444 !important;;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
function bindWorkflowDesigner(elementId) {
|
||||
bindWorkflowDesignerToolbox(elementId)
|
||||
bindWorkflowDesignerSplitter(elementId)
|
||||
bindWorkflowProperties(elementId)
|
||||
}
|
||||
|
||||
function bindWorkflowDesignerToolbox(elementId) {
|
||||
@@ -204,7 +205,7 @@ function bindWorkflowDesignerToolbox(elementId) {
|
||||
// Also trigger server-side selection
|
||||
utils.makeRequest('/workflows/select-component', {
|
||||
component_id: designer.selectedComponent
|
||||
}, `#p_${elementId}`, "outerHTML");
|
||||
}, `#ppc_${elementId}`, "outerHTML");
|
||||
},
|
||||
|
||||
// Deselect all components
|
||||
@@ -612,3 +613,153 @@ function bindWorkflowDesignerSplitter(elementId) {
|
||||
}
|
||||
}
|
||||
|
||||
function bindWorkflowProperties(elementId) {
|
||||
let isDragging = false;
|
||||
let isResizing = false;
|
||||
let startX = 0;
|
||||
let startWidths = {};
|
||||
let resizeType = '';
|
||||
|
||||
console.debug("Binding Properties component for "+ elementId)
|
||||
properties_component = document.getElementById(`p_${elementId}`);
|
||||
if (properties_component == null) {
|
||||
console.error(`'Component ' p_${elementId}' is not found !' `)
|
||||
return
|
||||
}
|
||||
const totalWidth = properties_component.getBoundingClientRect().width
|
||||
console.debug("totalWidth", totalWidth)
|
||||
|
||||
const minPropertiesWidth = 352; // this value avoid scroll bars
|
||||
|
||||
const inputSection = document.getElementById(`pi_${elementId}`);
|
||||
const propertiesSection = document.getElementById(`pp_${elementId}`);
|
||||
const outputSection = document.getElementById(`po_${elementId}`);
|
||||
const dragHandle = document.getElementById(`ppt_${elementId}`);
|
||||
const leftHandle = document.getElementById(`ppl_${elementId}`);
|
||||
const rightHandle = document.getElementById(`ppr_${elementId}`);
|
||||
|
||||
// Drag and drop for moving properties section
|
||||
dragHandle.addEventListener('mousedown', (e) => {
|
||||
isDragging = true;
|
||||
startX = e.clientX;
|
||||
startWidths = {
|
||||
input: parseInt(inputSection.style.width),
|
||||
properties: parseInt(propertiesSection.style.width),
|
||||
output: parseInt(outputSection.style.width)
|
||||
};
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
// Left resize handle
|
||||
leftHandle.addEventListener('mousedown', (e) => {
|
||||
isResizing = true;
|
||||
resizeType = 'left';
|
||||
startX = e.clientX;
|
||||
startWidths = {
|
||||
input: parseInt(inputSection.style.width),
|
||||
properties: parseInt(propertiesSection.style.width),
|
||||
output: parseInt(outputSection.style.width)
|
||||
};
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
// Right resize handle
|
||||
rightHandle.addEventListener('mousedown', (e) => {
|
||||
isResizing = true;
|
||||
resizeType = 'right';
|
||||
startX = e.clientX;
|
||||
startWidths = {
|
||||
input: parseInt(inputSection.style.width),
|
||||
properties: parseInt(propertiesSection.style.width),
|
||||
output: parseInt(outputSection.style.width)
|
||||
};
|
||||
e.preventDefault();
|
||||
});
|
||||
|
||||
// Mouse move
|
||||
document.addEventListener('mousemove', (e) => {
|
||||
if (isDragging) {
|
||||
const deltaX = e.clientX - startX;
|
||||
let newInputWidth = startWidths.input + deltaX;
|
||||
let newOutputWidth = startWidths.output - deltaX;
|
||||
|
||||
// Constraints
|
||||
if (newInputWidth < 0) {
|
||||
newInputWidth = 0;
|
||||
newOutputWidth = totalWidth - startWidths.properties;
|
||||
}
|
||||
if (newOutputWidth < 0) {
|
||||
newOutputWidth = 0;
|
||||
newInputWidth = totalWidth - startWidths.properties;
|
||||
}
|
||||
|
||||
inputSection.style.width = newInputWidth + 'px';
|
||||
outputSection.style.width = newOutputWidth + 'px';
|
||||
}
|
||||
|
||||
if (isResizing) {
|
||||
const deltaX = e.clientX - startX;
|
||||
let newInputWidth = startWidths.input;
|
||||
let newPropertiesWidth = startWidths.properties;
|
||||
let newOutputWidth = startWidths.output;
|
||||
|
||||
if (resizeType === 'left') {
|
||||
newInputWidth = startWidths.input + deltaX;
|
||||
newPropertiesWidth = startWidths.properties - deltaX;
|
||||
|
||||
if (newInputWidth < 0) {
|
||||
newInputWidth = 0;
|
||||
newPropertiesWidth = startWidths.input + startWidths.properties;
|
||||
}
|
||||
if (newPropertiesWidth < minPropertiesWidth) {
|
||||
newPropertiesWidth = minPropertiesWidth;
|
||||
newInputWidth = totalWidth - minPropertiesWidth - startWidths.output;
|
||||
}
|
||||
} else if (resizeType === 'right') {
|
||||
newPropertiesWidth = startWidths.properties + deltaX;
|
||||
newOutputWidth = startWidths.output - deltaX;
|
||||
|
||||
if (newOutputWidth < 0) {
|
||||
newOutputWidth = 0;
|
||||
newPropertiesWidth = startWidths.properties + startWidths.output;
|
||||
}
|
||||
if (newPropertiesWidth < minPropertiesWidth) {
|
||||
newPropertiesWidth = minPropertiesWidth;
|
||||
newOutputWidth = totalWidth - startWidths.input - minPropertiesWidth;
|
||||
}
|
||||
}
|
||||
|
||||
inputSection.style.width = newInputWidth + 'px';
|
||||
propertiesSection.style.width = newPropertiesWidth + 'px';
|
||||
outputSection.style.width = newOutputWidth + 'px';
|
||||
}
|
||||
});
|
||||
|
||||
// Mouse up
|
||||
document.addEventListener('mouseup', () => {
|
||||
if (isDragging || isResizing) {
|
||||
// Send HTMX request with new dimensions
|
||||
const currentWidths = {
|
||||
input_width: parseInt(inputSection.style.width),
|
||||
properties_width: parseInt(propertiesSection.style.width),
|
||||
output_width: parseInt(outputSection.style.width)
|
||||
};
|
||||
|
||||
try {
|
||||
htmx.ajax('POST', '/workflows/update-properties-layout', {
|
||||
target: `#${elementId}`,
|
||||
headers: { "Content-Type": "application/x-www-form-urlencoded" },
|
||||
swap: "outerHTML",
|
||||
values: { _id: elementId, ...currentWidths }
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('HTMX request failed:', error);
|
||||
throw error;
|
||||
}
|
||||
|
||||
isDragging = false;
|
||||
isResizing = false;
|
||||
resizeType = '';
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -23,3 +23,6 @@ icon_pause_circle = NotStr(
|
||||
# fluent RecordStop20Regular
|
||||
icon_stop_circle = NotStr(
|
||||
"""<svg name="stop" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M10 3a7 7 0 1 0 0 14a7 7 0 0 0 0-14zm-8 7a8 8 0 1 1 16 0a8 8 0 0 1-16 0zm5-2a1 1 0 0 1 1-1h4a1 1 0 0 1 1 1v4a1 1 0 0 1-1 1H8a1 1 0 0 1-1-1V8z" fill="currentColor"></path></g></svg>""")
|
||||
|
||||
# fluent ArrowClockwise20Regular
|
||||
icon_refresh = NotStr("""<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 20 20"><g fill="none"><path d="M3.066 9.05a7 7 0 0 1 12.557-3.22l.126.17H12.5a.5.5 0 1 0 0 1h4a.5.5 0 0 0 .5-.5V2.502a.5.5 0 0 0-1 0v2.207a8 8 0 1 0 1.986 4.775a.5.5 0 0 0-.998.064A7 7 0 1 1 3.066 9.05z" fill="currentColor"></path></g></svg>""")
|
||||
@@ -1,23 +1,6 @@
|
||||
from components.BaseCommandManager import BaseCommandManager
|
||||
from components.undo_redo.components.UndoRedo import CommandHistory
|
||||
from components.workflows.constants import Routes, ROUTE_ROOT
|
||||
|
||||
class AddConnectorCommand(CommandHistory):
|
||||
|
||||
def __init__(self, owner, connector):
|
||||
super().__init__("Add connector", "Add connector", owner)
|
||||
self.connector = connector
|
||||
|
||||
def undo(self):
|
||||
del self.owner.get_state().components[self.connector.id]
|
||||
self.owner.get_db().save_state(self.owner.get_key(), self.owner.get_state()) # update db
|
||||
return self.owner.refresh_designer(True)
|
||||
|
||||
def redo(self, oob=True):
|
||||
self.owner.get_state().components[self.connector.id] = self.connector
|
||||
self.owner.get_db().save_state(self.owner.get_key(), self.owner.get_state()) # update db
|
||||
return self.owner.refresh_designer(oob)
|
||||
|
||||
|
||||
class WorkflowsCommandManager(BaseCommandManager):
|
||||
def __init__(self, owner):
|
||||
@@ -54,7 +37,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
def select_processor(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.SelectProcessor}",
|
||||
"hx-target": f"#p_{self._id}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-trigger": "change",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
@@ -63,7 +46,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
def save_properties(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.SaveProperties}",
|
||||
"hx-target": f"#p_{self._id}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
}
|
||||
@@ -71,7 +54,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
def cancel_properties(self, component_id: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.CancelProperties}",
|
||||
"hx-target": f"#p_{self._id}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}"}}',
|
||||
}
|
||||
@@ -79,7 +62,7 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
def on_processor_details_event(self, component_id: str, event_name: str):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.OnProcessorDetailsEvent}",
|
||||
"hx-target": f"#p_{self._id}",
|
||||
"hx-target": f"#ppc_{self._id}",
|
||||
"hx-trigger": "change",
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}", "component_id": "{component_id}", "event_name": "{event_name}"}}',
|
||||
@@ -108,6 +91,13 @@ class WorkflowDesignerCommandManager(BaseCommandManager):
|
||||
"hx-swap": "outerHTML",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
def refresh(self):
|
||||
return {
|
||||
"hx_post": f"{ROUTE_ROOT}{Routes.Refresh}",
|
||||
"hx-swap": "none",
|
||||
"hx-vals": f'js:{{"_id": "{self._id}"}}',
|
||||
}
|
||||
|
||||
|
||||
class WorkflowPlayerCommandManager(BaseCommandManager):
|
||||
|
||||
@@ -6,12 +6,15 @@ from fasthtml.xtend import Script
|
||||
|
||||
from assets.icons import icon_error
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.workflows.assets.icons import icon_play, icon_pause, icon_stop
|
||||
from components.workflows.commands import WorkflowDesignerCommandManager, AddConnectorCommand
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from components.workflows.assets.icons import icon_play, icon_pause, icon_stop, icon_refresh
|
||||
from components.workflows.commands import WorkflowDesignerCommandManager
|
||||
from components.workflows.components.WorkflowDesignerProperties import WorkflowDesignerProperties
|
||||
from components.workflows.components.WorkflowPlayer import WorkflowPlayer
|
||||
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes
|
||||
from components.workflows.constants import WORKFLOW_DESIGNER_INSTANCE_ID, ProcessorTypes, COMPONENT_TYPES, \
|
||||
PROCESSOR_TYPES
|
||||
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, \
|
||||
Connection, WorkflowsDesignerDbManager, ComponentState
|
||||
Connection, WorkflowsDesignerDbManager, ComponentState, WorkflowsDesignerState
|
||||
from components_helpers import apply_boundaries, mk_tooltip, mk_dialog_buttons, mk_icon
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
|
||||
@@ -21,33 +24,6 @@ from utils.DbManagementHelper import DbManagementHelper
|
||||
|
||||
logger = logging.getLogger("WorkflowDesigner")
|
||||
|
||||
# Component templates
|
||||
COMPONENT_TYPES = {
|
||||
ProcessorTypes.Producer: {
|
||||
"title": "Data Producer",
|
||||
"description": "Generates or loads data",
|
||||
"icon": "📊",
|
||||
"color": "bg-green-100 border-green-300 text-neutral"
|
||||
},
|
||||
ProcessorTypes.Filter: {
|
||||
"title": "Data Filter",
|
||||
"description": "Filters and transforms data",
|
||||
"icon": "🔍",
|
||||
"color": "bg-blue-100 border-blue-300 text-neutral"
|
||||
},
|
||||
ProcessorTypes.Presenter: {
|
||||
"title": "Data Presenter",
|
||||
"description": "Displays or exports data",
|
||||
"icon": "📋",
|
||||
"color": "bg-purple-100 border-purple-300 text-neutral"
|
||||
}
|
||||
}
|
||||
|
||||
PROCESSOR_TYPES = {
|
||||
ProcessorTypes.Producer: ["Repository", "Jira"],
|
||||
ProcessorTypes.Filter: ["Default"],
|
||||
ProcessorTypes.Presenter: ["Default"]}
|
||||
|
||||
|
||||
class WorkflowDesigner(BaseComponent):
|
||||
def __init__(self, session,
|
||||
@@ -63,25 +39,30 @@ class WorkflowDesigner(BaseComponent):
|
||||
self._key = key
|
||||
self._designer_settings = designer_settings
|
||||
self._db = WorkflowsDesignerDbManager(session, settings_manager)
|
||||
self._state = self._db.load_state(key)
|
||||
self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)
|
||||
self._state: WorkflowsDesignerState = self._db.load_state(key)
|
||||
self._boundaries = boundaries
|
||||
self.commands = WorkflowDesignerCommandManager(self)
|
||||
self.properties = WorkflowDesignerProperties(self._session, f"{self._id}", self)
|
||||
|
||||
workflow_name = self._designer_settings.workflow_name
|
||||
self._player = InstanceManager.get(self._session,
|
||||
WorkflowPlayer.create_component_id(self._session, workflow_name),
|
||||
WorkflowPlayer,
|
||||
settings_manager=self._settings_manager,
|
||||
tabs_manager=self.tabs_manager,
|
||||
designer=self,
|
||||
boundaries=boundaries)
|
||||
self.player = InstanceManager.get(self._session,
|
||||
WorkflowPlayer.create_component_id(self._session, workflow_name),
|
||||
WorkflowPlayer,
|
||||
settings_manager=self._settings_manager,
|
||||
tabs_manager=self.tabs_manager,
|
||||
designer=self,
|
||||
boundaries=boundaries)
|
||||
|
||||
self._error_message = None
|
||||
|
||||
def set_boundaries(self, boundaries: dict):
|
||||
self._boundaries = boundaries
|
||||
|
||||
def get_state(self):
|
||||
def get_boundaries(self):
|
||||
return self._boundaries
|
||||
|
||||
def get_state(self) -> WorkflowsDesignerState:
|
||||
return self._state
|
||||
|
||||
def get_db(self):
|
||||
@@ -91,11 +72,23 @@ class WorkflowDesigner(BaseComponent):
|
||||
return self._key
|
||||
|
||||
def refresh_designer(self, oob=False):
|
||||
return self._mk_canvas(oob)
|
||||
if oob:
|
||||
return self._mk_canvas(oob)
|
||||
else:
|
||||
return self._mk_elements()
|
||||
|
||||
def refresh_properties(self, oob=False):
|
||||
return self._mk_properties(oob)
|
||||
|
||||
def refresh(self):
|
||||
return self.__ft__(oob=True)
|
||||
|
||||
def refresh_state(self):
|
||||
self._state = self._db.load_state(self._key)
|
||||
self.properties.update_layout()
|
||||
self.properties.update_component(self._state.selected_component_id)
|
||||
return self.__ft__(oob=True)
|
||||
|
||||
def add_component(self, component_type, x, y):
|
||||
self._state.component_counter += 1
|
||||
|
||||
@@ -111,39 +104,40 @@ class WorkflowDesigner(BaseComponent):
|
||||
description=info["description"],
|
||||
properties={"processor_name": PROCESSOR_TYPES[component_type][0]}
|
||||
)
|
||||
|
||||
command = AddConnectorCommand(self, component)
|
||||
undo_redo = ComponentsInstancesHelper.get_undo_redo(self._session)
|
||||
#undo_redo.push(command)
|
||||
self._state.components[component_id] = component
|
||||
self._db.save_state(self._key, self._state) # update db
|
||||
undo_redo.snapshot("add_component")
|
||||
return command.redo(), undo_redo.refresh()
|
||||
# self._state.components[component_id] = component
|
||||
# self._db.save_state(self._key, self._state) # update db
|
||||
# return self.refresh_designer()
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Add Component '{component_type}'", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs) # update db
|
||||
|
||||
return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def move_component(self, component_id, x, y):
|
||||
if component_id in self._state.components:
|
||||
component = self._state.components[component_id]
|
||||
self._state.selected_component_id = component_id
|
||||
self._state.components[component_id].x = int(x)
|
||||
self._state.components[component_id].y = int(y)
|
||||
self._db.save_state(self._key, self._state) # update db
|
||||
component.x = int(x)
|
||||
component.y = int(y)
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Move Component '{component.title}'", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs) # update db
|
||||
|
||||
return self.refresh_designer(), self.refresh_properties(True)
|
||||
return self.refresh_designer(), self.properties.refresh(mode="form", oob=True), self._undo_redo.refresh()
|
||||
|
||||
def delete_component(self, component_id):
|
||||
# Remove component
|
||||
if component_id in self._state.components:
|
||||
component = self._state.components[component_id]
|
||||
del self._state.components[component_id]
|
||||
|
||||
# Remove related connections
|
||||
self._state.connections = [connection for connection in self._state.connections
|
||||
if connection.from_id != component_id and connection.to_id != component_id]
|
||||
|
||||
# update db
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Remove Component '{component.title}'", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
|
||||
# Remove related connections
|
||||
self._state.connections = [connection for connection in self._state.connections
|
||||
if connection.from_id != component_id and connection.to_id != component_id]
|
||||
# update db
|
||||
self._db.save_state(self._key, self._state)
|
||||
|
||||
return self.refresh_designer()
|
||||
return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def add_connection(self, from_id, to_id):
|
||||
# Check if connection already exists
|
||||
@@ -156,9 +150,10 @@ class WorkflowDesigner(BaseComponent):
|
||||
self._state.connections.append(connection)
|
||||
|
||||
# update db
|
||||
self._db.save_state(self._key, self._state)
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Add Connection", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
|
||||
return self.refresh_designer()
|
||||
return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def delete_connection(self, from_id, to_id):
|
||||
for connection in self._state.connections:
|
||||
@@ -166,64 +161,84 @@ class WorkflowDesigner(BaseComponent):
|
||||
self._state.connections.remove(connection)
|
||||
|
||||
# update db
|
||||
self._db.save_state(self._key, self._state)
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Delete Connection", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
|
||||
return self.refresh_designer()
|
||||
return self.refresh_designer(), self._undo_redo.refresh()
|
||||
|
||||
def set_designer_height(self, height):
|
||||
self._state.designer_height = height
|
||||
self._db.save_state(self._key, self._state)
|
||||
return self.__ft__() # refresh the whole component
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Resize Designer", on_undo=lambda: self.refresh_state())
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
return self.__ft__(), self._undo_redo.refresh() # refresh the whole component
|
||||
|
||||
def update_properties_layout(self, input_width, properties_width, output_width):
|
||||
self._state.properties_input_width = input_width
|
||||
self._state.properties_properties_width = properties_width
|
||||
self._state.properties_output_width = output_width
|
||||
self.properties.update_layout()
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Resize Properties", on_undo=lambda: self.refresh_state())
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
return self.__ft__(), self._undo_redo.refresh() # refresh the whole component
|
||||
|
||||
def select_component(self, component_id):
|
||||
if component_id in self._state.components:
|
||||
self._state.selected_component_id = component_id
|
||||
self._db.save_state(self._key, self._state)
|
||||
|
||||
component = self._state.components[component_id]
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Select Component {component.title}", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
|
||||
return self.refresh_properties()
|
||||
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def save_properties(self, component_id: str, details: dict):
|
||||
if component_id in self._state.components:
|
||||
component = self._state.components[component_id]
|
||||
component.properties = details
|
||||
self._db.save_state(self._key, self._state)
|
||||
component.properties |= details
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Set properties for {component.title}", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
logger.debug(f"Saved properties for component {component_id}: {details}")
|
||||
|
||||
return self.refresh_properties()
|
||||
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def cancel_properties(self, component_id: str):
|
||||
if component_id in self._state.components:
|
||||
logger.debug(f"Cancel saving properties for component {component_id}")
|
||||
|
||||
return self.refresh_properties()
|
||||
return self.properties.refresh(mode="form")
|
||||
|
||||
def set_selected_processor(self, component_id: str, processor_name: str):
|
||||
if component_id in self._state.components:
|
||||
component = self._state.components[component_id]
|
||||
component.properties = {"processor_name": processor_name}
|
||||
|
||||
self._db.save_state(self._key, self._state)
|
||||
return self.refresh_properties()
|
||||
|
||||
undo_redo_attrs = UndoRedoAttrs(f"Set Processor for {component.title}", on_undo=self.refresh_state)
|
||||
self._db.save_state(self._key, self._state, undo_redo_attrs)
|
||||
return self.properties.refresh(mode="form"), self._undo_redo.refresh()
|
||||
|
||||
def play_workflow(self, boundaries: dict):
|
||||
self._error_message = None
|
||||
|
||||
self._player.run()
|
||||
if self._player.global_error:
|
||||
self.player.run()
|
||||
if self.player.global_error:
|
||||
# Show the error message in the same tab
|
||||
self._error_message = self._player.global_error
|
||||
self._error_message = self.player.global_error
|
||||
|
||||
else:
|
||||
|
||||
self.properties.set_entry_selector_data(self.player.nb_items)
|
||||
# change the tab and display the results
|
||||
self._player.set_boundaries(boundaries)
|
||||
self.tabs_manager.add_tab(f"Workflow {self._designer_settings.workflow_name}", self._player, self._player.key)
|
||||
self.player.set_boundaries(boundaries)
|
||||
self.tabs_manager.add_tab(f"Workflow {self._designer_settings.workflow_name}", self.player, self.player.key)
|
||||
|
||||
return self.tabs_manager.refresh()
|
||||
|
||||
def stop_workflow(self):
|
||||
self._error_message = None
|
||||
self._player.stop()
|
||||
self.player.stop()
|
||||
self.properties.set_entry_selector_data(0)
|
||||
return self.tabs_manager.refresh()
|
||||
|
||||
def on_processor_details_event(self, component_id: str, event_name: str, details: dict):
|
||||
@@ -236,7 +251,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
elif event_name == "OnJiraRequestTypeChanged":
|
||||
component.properties["request_type"] = details["request_type"]
|
||||
|
||||
return self.refresh_properties()
|
||||
return self.properties.refresh(mode="form")
|
||||
|
||||
def get_workflow_name(self):
|
||||
return self._designer_settings.workflow_name
|
||||
@@ -247,12 +262,13 @@ class WorkflowDesigner(BaseComponent):
|
||||
def get_workflow_connections(self):
|
||||
return self._state.connections
|
||||
|
||||
def __ft__(self):
|
||||
def __ft__(self, oob=False):
|
||||
return Div(
|
||||
H1(f"{self._designer_settings.workflow_name}", cls="text-xl font-bold"),
|
||||
P("Drag components from the toolbox to the canvas to create your workflow.", cls="text-sm mb-6"),
|
||||
# P("Drag components from the toolbox to the canvas to create your workflow.", cls="text-sm"),
|
||||
Div(
|
||||
self._mk_media(),
|
||||
# self._mk_refresh_button(),
|
||||
self._mk_error_message(),
|
||||
cls="flex mb-2",
|
||||
id=f"t_{self._id}"
|
||||
@@ -263,6 +279,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
Script(f"bindWorkflowDesigner('{self._id}');"),
|
||||
**apply_boundaries(self._boundaries),
|
||||
id=f"{self._id}",
|
||||
hx_swap_oob='true' if oob else None,
|
||||
)
|
||||
|
||||
def _mk_connection_svg(self, conn: Connection):
|
||||
@@ -298,7 +315,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
|
||||
def _mk_component(self, component: WorkflowComponent):
|
||||
|
||||
runtime_state = self._player.get_component_runtime_state(component.id)
|
||||
runtime_state = self.player.get_component_runtime_state(component.id)
|
||||
|
||||
info = COMPONENT_TYPES[component.type]
|
||||
is_selected = self._state.selected_component_id == component.id
|
||||
@@ -340,6 +357,11 @@ class WorkflowDesigner(BaseComponent):
|
||||
)
|
||||
|
||||
def _mk_elements(self):
|
||||
if len(self._state.components) == 0:
|
||||
return Div("Drag components from the toolbox to the canvas to create your workflow.",
|
||||
cls="flex items-center justify-center h-full w-full"
|
||||
)
|
||||
|
||||
return Div(
|
||||
# Render connections
|
||||
*[NotStr(self._mk_connection_svg(conn)) for conn in self._state.connections],
|
||||
@@ -384,6 +406,9 @@ class WorkflowDesigner(BaseComponent):
|
||||
cls=f"media-controls flex m-2"
|
||||
)
|
||||
|
||||
def _mk_refresh_button(self):
|
||||
return mk_icon(icon_refresh, **self.commands.refresh())
|
||||
|
||||
def _mk_error_message(self):
|
||||
if not self._error_message:
|
||||
return Div()
|
||||
@@ -408,6 +433,17 @@ class WorkflowDesigner(BaseComponent):
|
||||
|
||||
return Div('Not defined yet !')
|
||||
|
||||
def _mk_properties_output(self, component):
|
||||
return Div(
|
||||
"Output name",
|
||||
Input(type="input",
|
||||
name="output_name",
|
||||
placeholder="data",
|
||||
value=component.properties.get("output_name", None),
|
||||
cls="input w-xs"),
|
||||
cls="join"
|
||||
)
|
||||
|
||||
def _mk_properties_details(self, component_id, allow_component_selection=False):
|
||||
def _mk_header():
|
||||
return Div(
|
||||
@@ -441,11 +477,32 @@ class WorkflowDesigner(BaseComponent):
|
||||
return Div(
|
||||
Form(
|
||||
_mk_header(),
|
||||
_mk_select(),
|
||||
self._mk_processor_properties(component, selected_processor_name),
|
||||
Div(
|
||||
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Properties", checked="checked"),
|
||||
Div(
|
||||
_mk_select(),
|
||||
self._mk_processor_properties(component, selected_processor_name),
|
||||
cls="tab-content"
|
||||
),
|
||||
|
||||
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Inputs"),
|
||||
Div(
|
||||
"Inputs",
|
||||
cls="tab-content"
|
||||
),
|
||||
|
||||
Input(type="radio", name=f"pt_{self._id}", cls="tab", aria_label="Output"),
|
||||
Div(
|
||||
self._mk_properties_output(component),
|
||||
cls="tab-content"
|
||||
),
|
||||
|
||||
cls="tabs tabs-border"
|
||||
),
|
||||
mk_dialog_buttons(cls="mt-4",
|
||||
on_ok=self.commands.save_properties(component_id),
|
||||
on_cancel=self.commands.cancel_properties(component_id)),
|
||||
|
||||
cls="font-mono text-sm",
|
||||
id=f"f_{self._id}_{component_id}",
|
||||
),
|
||||
@@ -453,13 +510,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
)
|
||||
|
||||
def _mk_properties(self, oob=False):
|
||||
return Div(
|
||||
self._mk_properties_details(self._state.selected_component_id),
|
||||
cls="p-2 bg-base-100 rounded-lg border",
|
||||
style=f"height:{self._get_properties_height()}px;",
|
||||
hx_swap_oob='true' if oob else None,
|
||||
id=f"p_{self._id}",
|
||||
)
|
||||
return self.properties.__ft__(oob)
|
||||
|
||||
def _mk_jira_processor_details(self, component):
|
||||
def _mk_option(name):
|
||||
@@ -468,7 +519,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
selected="selected" if name.value == request_type else None)
|
||||
|
||||
def _mk_input_group():
|
||||
if request_type == JiraRequestTypes.Issues.value:
|
||||
if request_type == JiraRequestTypes.Search.value:
|
||||
return Div(
|
||||
Input(type="text",
|
||||
name="request",
|
||||
@@ -488,7 +539,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
)
|
||||
|
||||
def _mk_extra_parameters():
|
||||
if request_type == JiraRequestTypes.Issues.value:
|
||||
if request_type == JiraRequestTypes.Search.value:
|
||||
return Input(type="text",
|
||||
name="fields",
|
||||
value=component.properties.get("fields", DEFAULT_SEARCH_FIELDS),
|
||||
@@ -497,7 +548,7 @@ class WorkflowDesigner(BaseComponent):
|
||||
else:
|
||||
return None
|
||||
|
||||
request_type = component.properties.get("request_type", JiraRequestTypes.Issues.value)
|
||||
request_type = component.properties.get("request_type", JiraRequestTypes.Search.value)
|
||||
return Div(
|
||||
Fieldset(
|
||||
Legend("JQL", cls="fieldset-legend"),
|
||||
|
||||
@@ -0,0 +1,384 @@
|
||||
from fasthtml.common import *
|
||||
|
||||
from components.BaseComponent import BaseComponent
|
||||
from components.entryselector.components.EntrySelector import EntrySelector
|
||||
from components.jsonviewer.components.JsonViewer import JsonViewer
|
||||
from components.workflows.constants import COMPONENT_TYPES, PROCESSOR_TYPES
|
||||
from components_helpers import mk_dialog_buttons
|
||||
from core.instance_manager import InstanceManager
|
||||
from core.jira import JiraRequestTypes, DEFAULT_SEARCH_FIELDS
|
||||
from utils.DbManagementHelper import DbManagementHelper
|
||||
|
||||
|
||||
@dataclass
|
||||
class DesignerLayout:
|
||||
input_width: int
|
||||
properties_width: int
|
||||
output_width: int
|
||||
|
||||
|
||||
class WorkflowDesignerProperties(BaseComponent):
|
||||
def __init__(self, session, instance_id, owner):
|
||||
super().__init__(session, instance_id)
|
||||
self._owner = owner
|
||||
self._boundaries = self._owner.get_boundaries()
|
||||
self._commands = self._owner.commands
|
||||
self.layout = None
|
||||
self._component = None
|
||||
self.update_layout()
|
||||
self.update_component(self._owner.get_state().selected_component_id)
|
||||
self.entry_selector: EntrySelector = InstanceManager.new(self._session,
|
||||
EntrySelector,
|
||||
owner=self,
|
||||
hooks={
|
||||
"on_entry_selected": self.on_entry_selector_changed})
|
||||
self._input_jsonviewer: JsonViewer = InstanceManager.new(self._session,
|
||||
JsonViewer)
|
||||
self._output_jsonviewer: JsonViewer = InstanceManager.new(self._session,
|
||||
JsonViewer)
|
||||
|
||||
def set_entry_selector_data(self, data):
|
||||
self.entry_selector.set_data(data)
|
||||
|
||||
def update_layout(self):
|
||||
if self._owner.get_state().properties_input_width is None:
|
||||
input_width = self._boundaries["width"] // 3
|
||||
properties_width = self._boundaries["width"] // 3
|
||||
output_width = self._boundaries["width"] - input_width - properties_width
|
||||
else:
|
||||
input_width = self._owner.get_state().properties_input_width
|
||||
properties_width = self._owner.get_state().properties_properties_width
|
||||
output_width = self._owner.get_state().properties_output_width
|
||||
|
||||
self.layout = DesignerLayout(
|
||||
input_width=input_width,
|
||||
properties_width=properties_width,
|
||||
output_width=output_width
|
||||
)
|
||||
|
||||
def update_component(self, component_id):
|
||||
if component_id is None or component_id not in self._owner.get_state().components:
|
||||
self._component = None
|
||||
else:
|
||||
self._component = self._owner.get_state().components[component_id]
|
||||
|
||||
def refresh(self, mode="all", oob=False):
|
||||
self.update_component(self._owner.get_state().selected_component_id)
|
||||
if mode == "form":
|
||||
return self._mk_content(oob=oob)
|
||||
|
||||
return self.__ft__(oob=oob)
|
||||
|
||||
def on_entry_selector_changed(self, entry):
|
||||
entry = int(entry)
|
||||
|
||||
input_data, output_data = None, None
|
||||
selected_component_id = self._owner.get_state().selected_component_id
|
||||
if selected_component_id is not None:
|
||||
runtime_state = self._owner.player.runtime_states.get(selected_component_id, None)
|
||||
if runtime_state is not None:
|
||||
input_content = runtime_state.input[entry] if len(runtime_state.input) > entry else None
|
||||
output_content = runtime_state.output[entry] if len(runtime_state.output) > entry else None
|
||||
if input_content is not None:
|
||||
self._input_jsonviewer.set_data(input_content.item.as_dict())
|
||||
input_data = self._input_jsonviewer
|
||||
if output_content is not None:
|
||||
self._output_jsonviewer.set_data(output_content.item.as_dict())
|
||||
output_data = self._output_jsonviewer
|
||||
|
||||
return (self._mk_input(content=input_data, oob=True),
|
||||
self._mk_output(content=output_data, oob=True))
|
||||
|
||||
def _mk_layout(self):
|
||||
return Div(
|
||||
self.entry_selector,
|
||||
Div(
|
||||
self._mk_input(),
|
||||
self._mk_properties(),
|
||||
self._mk_output(),
|
||||
cls="flex",
|
||||
style="height: 100%; width: 100%; flex: 1;"
|
||||
)
|
||||
)
|
||||
|
||||
def _mk_input(self, content=None, oob=False):
|
||||
return Div(
|
||||
content,
|
||||
id=f"pi_{self._id}",
|
||||
style=f"width: {self.layout.input_width}px;",
|
||||
cls="wkf-properties-input",
|
||||
hx_swap_oob=f'true' if oob else None,
|
||||
)
|
||||
|
||||
def _mk_output(self, content=None, oob=False):
|
||||
return Div(
|
||||
content,
|
||||
id=f"po_{self._id}",
|
||||
style=f"width: {self.layout.output_width}px;",
|
||||
cls="wkf-properties-output",
|
||||
hx_swap_oob=f'true' if oob else None,
|
||||
)
|
||||
|
||||
def _mk_properties(self):
|
||||
return Div(
|
||||
# Drag handle (20px height)
|
||||
Div(
|
||||
A(cls="wkf-properties-handle-top"),
|
||||
cls="wkf-properties-top",
|
||||
id=f"ppt_{self._id}",
|
||||
),
|
||||
|
||||
# Properties content
|
||||
self._mk_content(),
|
||||
|
||||
# Left resize handle
|
||||
Div(
|
||||
id=f"ppl_{self._id}",
|
||||
cls="wkf-properties-handle-left"
|
||||
),
|
||||
|
||||
# Right resize handle
|
||||
Div(
|
||||
id=f"ppr_{self._id}",
|
||||
cls="wkf-properties-handle-right"
|
||||
),
|
||||
|
||||
id=f"pp_{self._id}",
|
||||
style=f"width: {self.layout.properties_width}px; height: 100%;",
|
||||
cls="wkf-properties-properties flex flex-col",
|
||||
)
|
||||
|
||||
def _mk_content(self, oob=False):
|
||||
|
||||
return Div(
|
||||
self._header(),
|
||||
self._form(),
|
||||
cls="wkf-properties-content",
|
||||
id=f"ppc_{self._id}",
|
||||
hx_swap_oob=f'true' if oob else None,
|
||||
)
|
||||
|
||||
def _header(self):
|
||||
if self._component is None:
|
||||
return None
|
||||
|
||||
icon = COMPONENT_TYPES[self._component.type]["icon"]
|
||||
color = COMPONENT_TYPES[self._component.type]["color"]
|
||||
return Div(
|
||||
Div(
|
||||
Span(icon),
|
||||
H4(self._component.title, cls="font-semibold text-xs"),
|
||||
cls=f"rounded-lg border-2 {color} flex text-center px-2"
|
||||
),
|
||||
Div(self._component.id, cls="ml-2"),
|
||||
cls="flex wkf-properties-content-header",
|
||||
)
|
||||
|
||||
def _form(self):
|
||||
if self._component is None:
|
||||
return None
|
||||
|
||||
component_id = self._component.id
|
||||
return Form(
|
||||
Div(
|
||||
self._mk_select_processor(),
|
||||
self._content_details(),
|
||||
style="flex-grow: 1; overflow-y: auto;"
|
||||
),
|
||||
mk_dialog_buttons(cls="pb-2",
|
||||
on_ok=self._commands.save_properties(component_id),
|
||||
on_cancel=self._commands.cancel_properties(component_id)
|
||||
),
|
||||
id=f"ppf_{self._id}",
|
||||
cls="wkf-properties-content-form",
|
||||
)
|
||||
|
||||
def _mk_select_processor(self):
|
||||
selected_processor_name = self._component.properties.get("processor_name", None)
|
||||
return Select(
|
||||
*[Option(processor_name, selected="selected" if processor_name == selected_processor_name else None)
|
||||
for processor_name in PROCESSOR_TYPES[self._component.type]],
|
||||
cls="select select-sm m-2",
|
||||
id="processor_name",
|
||||
name="processor_name",
|
||||
**self._commands.select_processor(self._component.id)
|
||||
)
|
||||
|
||||
def _content_details(self):
|
||||
component_type = self._component.type
|
||||
processor_name = self._component.properties.get("processor_name", None)
|
||||
key = f"_mk_details_{component_type}_{processor_name}".lower()
|
||||
if hasattr(self, key):
|
||||
return getattr(self, key)()
|
||||
else:
|
||||
return Div(f"Component '{key}' not found")
|
||||
|
||||
def _mk_details_producer_jira(self):
|
||||
def _mk_option(name):
|
||||
"""
|
||||
Generic helper to create options
|
||||
:param name:
|
||||
:return:
|
||||
"""
|
||||
return Option(name.name,
|
||||
value=name.value,
|
||||
selected="selected" if name.value == request_type else None)
|
||||
|
||||
def _mk_input_group():
|
||||
if request_type == JiraRequestTypes.Search.value or request_type == "issues": # remove issues at some point
|
||||
return [
|
||||
Div(
|
||||
Input(type="text",
|
||||
name=f"{request_type}_fields",
|
||||
value=self._component.properties.get(f"{request_type}_fields", DEFAULT_SEARCH_FIELDS),
|
||||
placeholder="default fields",
|
||||
cls="input w-full"),
|
||||
P("Jira fields to retrieve"),
|
||||
),
|
||||
Div(
|
||||
Input(type="text",
|
||||
name=f"{request_type}_request",
|
||||
value=self._component.properties.get(f"{request_type}_request", ""),
|
||||
placeholder="Enter JQL",
|
||||
cls="input w-full"),
|
||||
P("Write your jql code"),
|
||||
)
|
||||
]
|
||||
elif request_type in (JiraRequestTypes.Issue.value, JiraRequestTypes.Comments.value):
|
||||
return [
|
||||
Div(
|
||||
Input(type="text",
|
||||
name=f"{request_type}_request",
|
||||
value=self._component.properties.get(f"{request_type}_request", ""),
|
||||
placeholder="Issue id",
|
||||
cls="input w-full"),
|
||||
P("Put the issue id here"),
|
||||
)
|
||||
]
|
||||
elif request_type == JiraRequestTypes.Versions.value:
|
||||
return [
|
||||
Div(
|
||||
Input(type="text",
|
||||
name=f"{request_type}_request",
|
||||
value=self._component.properties.get(f"{request_type}_request", ""),
|
||||
placeholder="Project key",
|
||||
cls="input w-full"),
|
||||
P("Enter the project key"),
|
||||
)
|
||||
]
|
||||
else:
|
||||
return [Div(f"** Not Implemented ** ('{request_type}' not supported yet)")]
|
||||
|
||||
request_type = self._component.properties.get("request_type", JiraRequestTypes.Search.value)
|
||||
return Div(
|
||||
Fieldset(
|
||||
Legend("Jira", cls="fieldset-legend"),
|
||||
Div(
|
||||
Select(
|
||||
*[_mk_option(enum) for enum in JiraRequestTypes],
|
||||
cls="select w-xs",
|
||||
name="request_type",
|
||||
**self._commands.on_processor_details_event(self._component.id, "OnJiraRequestTypeChanged"),
|
||||
),
|
||||
P("Jira ressource type"),
|
||||
cls="mb-4"
|
||||
),
|
||||
*_mk_input_group(),
|
||||
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
||||
),
|
||||
)
|
||||
|
||||
def _mk_details_producer_repository(self):
|
||||
selected_repo = self._component.properties.get("repository", None)
|
||||
selected_table = self._component.properties.get("table", None)
|
||||
|
||||
def _mk_repositories_options():
|
||||
repositories = DbManagementHelper.list_repositories(self._session)
|
||||
if len(repositories) == 0:
|
||||
return [Option("No repository available", disabled=True)]
|
||||
|
||||
return ([Option("Choose a repository", disabled=True, selected="selected" if selected_repo is None else None)] +
|
||||
[Option(repo.name, selected="selected" if repo.name == selected_repo else None)
|
||||
for repo in DbManagementHelper.list_repositories(self._session)])
|
||||
|
||||
def _mk_tables_options():
|
||||
if selected_repo is None:
|
||||
return [Option("No repository selected", disabled=True, selected="selected")]
|
||||
|
||||
tables = DbManagementHelper.list_tables(self._session, selected_repo)
|
||||
if len(tables) == 0:
|
||||
return [Option("No table available", disabled=True)]
|
||||
|
||||
return ([Option("Choose a table", disabled=True, selected="selected" if selected_table is None else None)] +
|
||||
[Option(table, selected="selected" if table == selected_table else None)
|
||||
for table in DbManagementHelper.list_tables(self._session, selected_repo)])
|
||||
|
||||
return Div(
|
||||
Fieldset(
|
||||
Legend("Repository", cls="fieldset-legend"),
|
||||
Div(
|
||||
Select(
|
||||
*_mk_repositories_options(),
|
||||
cls="select w-64",
|
||||
id=f"repository_{self._id}",
|
||||
name="repository",
|
||||
**self._commands.on_processor_details_event(self._component.id, "OnRepositoryChanged"),
|
||||
),
|
||||
P("Select the repository"),
|
||||
),
|
||||
Div(
|
||||
Select(
|
||||
*_mk_tables_options(),
|
||||
cls="select w-64",
|
||||
id=f"table_{self._id}",
|
||||
name="table",
|
||||
),
|
||||
P("Select the table"),
|
||||
),
|
||||
|
||||
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
||||
|
||||
)
|
||||
)
|
||||
|
||||
def _mk_details_filter_default(self):
|
||||
return Div(
|
||||
Fieldset(
|
||||
Legend("Filter", cls="fieldset-legend"),
|
||||
Input(type="text",
|
||||
name="filter",
|
||||
value=self._component.properties.get("filter", ""),
|
||||
placeholder="Filter expression",
|
||||
cls="input w-full"),
|
||||
P("Filter expression"),
|
||||
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
||||
)
|
||||
)
|
||||
|
||||
def _mk_details_presenter_default(self):
|
||||
return Div(
|
||||
Fieldset(
|
||||
Legend("Presenter", cls="fieldset-legend"),
|
||||
Input(type="text",
|
||||
name="columns",
|
||||
value=self._component.properties.get("columns", ""),
|
||||
placeholder="Columns to display, separated by comma",
|
||||
cls="input w-full"),
|
||||
P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
|
||||
P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
|
||||
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
||||
)
|
||||
)
|
||||
|
||||
def __ft__(self, oob=False):
|
||||
# return self.render()
|
||||
return Div(
|
||||
self._mk_layout(),
|
||||
style=f"height: {self._get_height()}px;",
|
||||
id=f"p_{self._id}",
|
||||
hx_swap_oob=f'innerHTML' if oob else None,
|
||||
cls="wkf-properties"
|
||||
)
|
||||
|
||||
def _get_height(self):
|
||||
return self._boundaries["height"] - self._owner.get_state().designer_height - 86
|
||||
@@ -53,6 +53,7 @@ class WorkflowPlayer(BaseComponent):
|
||||
self.runtime_states = {}
|
||||
self.global_error = None
|
||||
self.has_error = False
|
||||
self.nb_items = 0
|
||||
|
||||
def set_boundaries(self, boundaries: dict):
|
||||
self._datagrid.set_boundaries(boundaries)
|
||||
@@ -93,11 +94,14 @@ class WorkflowPlayer(BaseComponent):
|
||||
self.global_error = engine.global_error
|
||||
|
||||
else: # loop through the components and update the runtime states
|
||||
self.nb_items = engine.nb_items
|
||||
for component in sorted_components:
|
||||
runtime_state = self.runtime_states.get(component.id)
|
||||
|
||||
if component.id not in engine.errors:
|
||||
runtime_state.state = ComponentState.SUCCESS
|
||||
runtime_state.input = engine.debug[component.id]["input"]
|
||||
runtime_state.output = engine.debug[component.id]["output"]
|
||||
continue
|
||||
|
||||
# the component failed
|
||||
@@ -177,7 +181,7 @@ class WorkflowPlayer(BaseComponent):
|
||||
# Return sorted components
|
||||
return [components_by_id[cid] for cid in sorted_order]
|
||||
|
||||
def _get_engine(self, sorted_components):
|
||||
def _get_engine(self, sorted_components) -> WorkflowEngine:
|
||||
# first reorder the component, according to the connection definitions
|
||||
engine = WorkflowEngine()
|
||||
for component in sorted_components:
|
||||
@@ -191,13 +195,14 @@ class WorkflowPlayer(BaseComponent):
|
||||
component.properties["repository"],
|
||||
component.properties["table"]))
|
||||
elif key == (ProcessorTypes.Producer, "Jira"):
|
||||
request_type = component.properties["request_type"]
|
||||
engine.add_processor(
|
||||
JiraDataProducer(self._session,
|
||||
self._settings_manager,
|
||||
component.id,
|
||||
component.properties["request_type"],
|
||||
component.properties["request"],
|
||||
component.properties["fields"]))
|
||||
component.properties[f"{request_type}_request"],
|
||||
component.properties.get(f"{request_type}_fields", None)))
|
||||
elif key == (ProcessorTypes.Filter, "Default"):
|
||||
engine.add_processor(DefaultDataFilter(component.id, component.properties["filter"]))
|
||||
elif key == (ProcessorTypes.Presenter, "Default"):
|
||||
|
||||
@@ -6,11 +6,39 @@ WORKFLOW_DESIGNER_DB_ENTRY = "WorkflowDesigner"
|
||||
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY = "Settings"
|
||||
WORKFLOW_DESIGNER_DB_STATE_ENTRY = "State"
|
||||
|
||||
|
||||
class ProcessorTypes:
|
||||
Producer = "producer"
|
||||
Filter = "filter"
|
||||
Presenter = "presenter"
|
||||
|
||||
|
||||
COMPONENT_TYPES = {
|
||||
ProcessorTypes.Producer: {
|
||||
"title": "Data Producer",
|
||||
"description": "Generates or loads data",
|
||||
"icon": "📊",
|
||||
"color": "bg-green-100 border-green-300 text-neutral"
|
||||
},
|
||||
ProcessorTypes.Filter: {
|
||||
"title": "Data Filter",
|
||||
"description": "Filters and transforms data",
|
||||
"icon": "🔍",
|
||||
"color": "bg-blue-100 border-blue-300 text-neutral"
|
||||
},
|
||||
ProcessorTypes.Presenter: {
|
||||
"title": "Data Presenter",
|
||||
"description": "Displays or exports data",
|
||||
"icon": "📋",
|
||||
"color": "bg-purple-100 border-purple-300 text-neutral"
|
||||
}
|
||||
}
|
||||
|
||||
PROCESSOR_TYPES = {
|
||||
ProcessorTypes.Producer: ["Repository", "Jira"],
|
||||
ProcessorTypes.Filter: ["Default"],
|
||||
ProcessorTypes.Presenter: ["Default"]}
|
||||
|
||||
ROUTE_ROOT = "/workflows"
|
||||
|
||||
|
||||
@@ -25,6 +53,7 @@ class Routes:
|
||||
AddConnection = "/add-connection"
|
||||
DeleteConnection = "/delete-connection"
|
||||
ResizeDesigner = "/resize-designer"
|
||||
UpdatePropertiesLayout = "/update-properties-layout"
|
||||
SaveProperties = "/save-properties"
|
||||
CancelProperties = "/cancel-properties"
|
||||
SelectProcessor = "/select-processor"
|
||||
@@ -32,4 +61,4 @@ class Routes:
|
||||
PlayWorkflow = "/play-workflow"
|
||||
PauseWorkflow = "/pause-workflow"
|
||||
StopWorkflow = "/stop-workflow"
|
||||
|
||||
Refresh = "/refresh"
|
||||
|
||||
@@ -2,10 +2,12 @@ import enum
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from components.workflows.constants import WORKFLOWS_DB_ENTRY, WORKFLOW_DESIGNER_DB_ENTRY, \
|
||||
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY, WORKFLOW_DESIGNER_DB_STATE_ENTRY
|
||||
from core.settings_management import SettingsManager
|
||||
from core.utils import make_safe_id
|
||||
from utils.ComponentsInstancesHelper import ComponentsInstancesHelper
|
||||
|
||||
logger = logging.getLogger("WorkflowsSettings")
|
||||
|
||||
@@ -46,6 +48,8 @@ class WorkflowComponentRuntimeState:
|
||||
id: str
|
||||
state: ComponentState = ComponentState.SUCCESS
|
||||
error_message: str | None = None
|
||||
input: list = None
|
||||
output: list = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -57,9 +61,12 @@ class WorkflowsDesignerSettings:
|
||||
class WorkflowsDesignerState:
|
||||
components: dict[str, WorkflowComponent] = field(default_factory=dict)
|
||||
connections: list[Connection] = field(default_factory=list)
|
||||
component_counter = 0
|
||||
designer_height = 230
|
||||
selected_component_id = None
|
||||
component_counter: int = 0
|
||||
designer_height: int = 230
|
||||
properties_input_width: int = None
|
||||
properties_properties_width: int = None
|
||||
properties_output_width: int = None
|
||||
selected_component_id: str | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -158,6 +165,7 @@ class WorkflowsDesignerDbManager:
|
||||
def __init__(self, session: dict, settings_manager: SettingsManager):
|
||||
self._session = session
|
||||
self._settings_manager = settings_manager
|
||||
self._undo_redo = ComponentsInstancesHelper.get_undo_redo(session)
|
||||
|
||||
@staticmethod
|
||||
def _get_db_entry(key):
|
||||
@@ -169,11 +177,17 @@ class WorkflowsDesignerDbManager:
|
||||
WORKFLOW_DESIGNER_DB_SETTINGS_ENTRY,
|
||||
settings)
|
||||
|
||||
def save_state(self, key: str, state: WorkflowsDesignerState):
|
||||
def save_state(self, key: str, state: WorkflowsDesignerState, undo_redo_attrs: UndoRedoAttrs = None):
|
||||
db_entry = self._get_db_entry(key)
|
||||
self._settings_manager.put(self._session,
|
||||
self._get_db_entry(key),
|
||||
db_entry,
|
||||
WORKFLOW_DESIGNER_DB_STATE_ENTRY,
|
||||
state)
|
||||
|
||||
if undo_redo_attrs is not None:
|
||||
self._undo_redo.snapshot(undo_redo_attrs,
|
||||
db_entry,
|
||||
WORKFLOW_DESIGNER_DB_STATE_ENTRY)
|
||||
|
||||
def save_all(self, key: str, settings: WorkflowsDesignerSettings = None, state: WorkflowsDesignerState = None):
|
||||
items = {}
|
||||
|
||||
@@ -48,6 +48,9 @@ class Expando:
|
||||
return self._props.copy()
|
||||
|
||||
def to_dict(self, mappings: dict) -> dict:
|
||||
"""
|
||||
Return the information as a dictionary, with the given mappings
|
||||
"""
|
||||
return {prop_name: self.get(path) for path, prop_name in mappings.items() if prop_name is not None}
|
||||
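# Illustrative example (assuming get() resolves dotted paths, as Jira.extract() relies on;
# the mapping keys and sample data are hypothetical). Entries mapped to None are skipped:
#   Expando({"key": "PRJ-1", "fields": {"summary": "Fix crash"}}).to_dict(
#       {"key": "id", "fields.summary": "summary", "fields.labels": None})
#   -> {"id": "PRJ-1", "summary": "Fix crash"}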
|
||||
def __hasattr__(self, item):
|
||||
|
||||
@@ -271,6 +271,42 @@ class DbEngine:
|
||||
except KeyError:
|
||||
raise DbException(f"Key '{key}' not found in entry '{entry}'")
|
||||
|
||||
def history(self, user_id, entry, digest=None, max_items=1000):
|
||||
"""
|
||||
Gives the current digest and all its ancestors
|
||||
:param user_id:
|
||||
:param entry:
|
||||
:param digest:
|
||||
:param max_items:
|
||||
:return:
|
||||
"""
|
||||
with self.lock:
|
||||
logger.info(f"History for {user_id=}, {entry=}, {digest=}")
|
||||
|
||||
digest_to_use = digest or self._get_entry_digest(user_id, entry)
|
||||
logger.debug(f"Using digest {digest_to_use}.")
|
||||
|
||||
count = 0
|
||||
history = []
|
||||
|
||||
while True:
|
||||
if count >= max_items or digest_to_use is None:
|
||||
break
|
||||
|
||||
history.append(digest_to_use)
|
||||
count += 1
|
||||
|
||||
try:
|
||||
target_file = self._get_obj_path(user_id, digest_to_use)
|
||||
with open(target_file, 'r', encoding='utf-8') as file:
|
||||
as_dict = json.load(file)
|
||||
|
||||
digest_to_use = as_dict[TAG_PARENT][0]
|
||||
except FileNotFoundError:
|
||||
break
|
||||
|
||||
return history
|
||||
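# Illustrative usage sketch (the entry name and index are hypothetical): history() returns
# the digest chain newest-first, so an earlier revision can be reloaded through the
# digest-aware load(), e.g.
#   digests = engine.history(user_id, entry, max_items=10)
#   previous = engine.load(user_id, entry, digests[1])  # parent of the current revision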
|
||||
def debug_root(self):
|
||||
"""
|
||||
Lists all folders in the root directory
|
||||
@@ -312,7 +348,7 @@ class DbEngine:
|
||||
return []
|
||||
return [f for f in os.listdir(self.root) if os.path.isdir(os.path.join(self.root, f)) and f != 'refs']
|
||||
|
||||
def debug_get_digest(self, user_id, entry):
|
||||
def get_digest(self, user_id, entry):
|
||||
return self._get_entry_digest(user_id, entry)
|
||||
|
||||
def _serialize(self, obj):
|
||||
|
||||
76
src/core/fasthtml_helper.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from fastcore.basics import NotStr
|
||||
|
||||
from core.utils import merge_classes
|
||||
|
||||
attr_map = {
|
||||
"cls": "class",
|
||||
"_id": "id",
|
||||
}
|
||||
|
||||
|
||||
def safe_attr(attr_name):
|
||||
attr_name = attr_name.replace("hx_", "hx-")
|
||||
attr_name = attr_name.replace("data_", "data-")
|
||||
return attr_map.get(attr_name, attr_name)
|
||||
|
||||
|
||||
def to_html(item):
|
||||
if item is None:
|
||||
return ""
|
||||
elif isinstance(item, str):
|
||||
return item
|
||||
elif isinstance(item, (int, float, bool)):
|
||||
return str(item)
|
||||
elif isinstance(item, MyFt):
|
||||
return item.to_html()
|
||||
elif isinstance(item, NotStr):
|
||||
return str(item)
|
||||
else:
|
||||
raise Exception(f"Unsupported type: {type(item)}, {item=}")
|
||||
|
||||
|
||||
class MyFt:
|
||||
def __init__(self, tag, *args, **kwargs):
|
||||
self.tag = tag
|
||||
self.children = args
|
||||
self.attrs = {safe_attr(k): v for k, v in kwargs.items()}
|
||||
|
||||
def to_html(self):
|
||||
body_items = [to_html(item) for item in self.children]
|
||||
return f"<{self.tag} {' '.join(f'{k}="{v}"' for k, v in self.attrs.items())}>{' '.join(body_items)}</div>"
|
||||
|
||||
def __ft__(self):
|
||||
return NotStr(self.to_html())
|
||||
|
||||
|
||||
class MyDiv(MyFt):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("div", *args, **kwargs)
|
||||
|
||||
|
||||
class MySpan(MyFt):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("span", *args, **kwargs)
|
||||
|
||||
|
||||
def mk_my_ellipsis(txt: str, cls='', **kwargs):
|
||||
merged_cls = merge_classes("truncate",
|
||||
cls,
|
||||
kwargs)
|
||||
return MyDiv(txt, cls=merged_cls, data_tooltip=txt, **kwargs)
|
||||
|
||||
|
||||
def mk_my_icon(icon, size=20, can_select=True, can_hover=False, cls='', tooltip=None, **kwargs):
|
||||
merged_cls = merge_classes(f"icon-{size}",
|
||||
'icon-btn' if can_select else '',
|
||||
'mmt-btn' if can_hover else '',
|
||||
cls,
|
||||
kwargs)
|
||||
return mk_my_tooltip(icon, tooltip, cls=merged_cls, **kwargs) if tooltip else MyDiv(icon, cls=merged_cls, **kwargs)
|
||||
|
||||
|
||||
def mk_my_tooltip(element, tooltip: str, cls='', **kwargs):
|
||||
merged_cls = merge_classes("mmt-tooltip",
|
||||
cls,
|
||||
kwargs)
|
||||
return MyDiv(element, cls=merged_cls, data_tooltip=tooltip, **kwargs)
|
||||
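A minimal usage sketch of the helpers above, using only names defined in src/core/fasthtml_helper.py; the element id, classes, and the /status URL are illustrative.
from core.fasthtml_helper import MyDiv, MySpan, mk_my_ellipsis

# Build a small HTML fragment; safe_attr() maps cls -> class, _id -> id and hx_get -> hx-get.
card = MyDiv(
    MySpan("Status:", cls="font-bold"),
    mk_my_ellipsis("A very long label that would overflow", cls="ml-2"),
    cls="flex items-center",
    _id="status-card",
    hx_get="/status",
)
print(card.to_html())  # e.g. <div class="flex items-center" id="status-card" hx-get="/status">...</div>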
@@ -47,6 +47,10 @@ class InstanceManager:
|
||||
|
||||
return InstanceManager._instances[key]
|
||||
|
||||
@staticmethod
|
||||
def new(session, instance_type, **kwargs):
|
||||
return InstanceManager.get(session, instance_type.create_component_id(session), instance_type, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def register(session: dict | None, instance, instance_id: str = None):
|
||||
"""
|
||||
|
||||
@@ -10,7 +10,7 @@ from core.Expando import Expando
|
||||
JIRA_ROOT = "https://altares.atlassian.net/rest/api/3"
|
||||
DEFAULT_HEADERS = {"Accept": "application/json"}
|
||||
DEFAULT_SEARCH_FIELDS = "summary,status,assignee"
|
||||
logger = logging.getLogger("jql")
|
||||
logger = logging.getLogger("Jira")
|
||||
|
||||
|
||||
class NotFound(Exception):
|
||||
@@ -18,8 +18,10 @@ class NotFound(Exception):
|
||||
|
||||
|
||||
class JiraRequestTypes(Enum):
|
||||
Issues = "issues"
|
||||
Search = "search"
|
||||
Issue = "issue"
|
||||
Comments = "comments"
|
||||
Versions = "versions"
|
||||
|
||||
|
||||
class Jira:
|
||||
@@ -41,7 +43,10 @@ class Jira:
|
||||
self.fields = fields
|
||||
|
||||
def test(self):
|
||||
logger.debug(f"test with no parameters")
|
||||
|
||||
url = f"{JIRA_ROOT}/myself"
|
||||
logger.debug(f" url: {url}")
|
||||
|
||||
response = requests.request(
|
||||
"GET",
|
||||
@@ -49,16 +54,21 @@ class Jira:
|
||||
headers=DEFAULT_HEADERS,
|
||||
auth=self.auth
|
||||
)
|
||||
logger.debug(f" response: {response}")
|
||||
logger.debug(f" response.text: {response.text}")
|
||||
|
||||
return response
|
||||
|
||||
def issue(self, issue_id: str) -> Expando:
|
||||
def issue(self, issue_id: str) -> list[Expando]:
|
||||
"""
|
||||
Retrieve an issue
|
||||
:param issue_id:
|
||||
:return:
|
||||
"""
|
||||
logger.debug(f"comments with {issue_id=}")
|
||||
|
||||
url = f"{JIRA_ROOT}/issue/{issue_id}"
|
||||
logger.debug(f" url: {url}")
|
||||
|
||||
response = requests.request(
|
||||
"GET",
|
||||
@@ -66,8 +76,10 @@ class Jira:
|
||||
headers=DEFAULT_HEADERS,
|
||||
auth=self.auth
|
||||
)
|
||||
logger.debug(f" response: {response}")
|
||||
logger.debug(f" response.text: {response.text}")
|
||||
|
||||
return Expando(json.loads(response.text))
|
||||
return [Expando(json.loads(response.text))]
|
||||
|
||||
def fields(self) -> list[Expando]:
|
||||
"""
|
||||
@@ -86,14 +98,14 @@ class Jira:
|
||||
as_dict = json.loads(response.text)
|
||||
return [Expando(field) for field in as_dict]
|
||||
|
||||
def issues(self, jql: str, fields=None) -> list[Expando]:
|
||||
def search(self, jql: str, fields=None) -> list[Expando]:
|
||||
"""
|
||||
Executes a JQL and returns the list of issues
|
||||
:param jql:
|
||||
:param fields: list of fields to retrieve
|
||||
:return:
|
||||
"""
|
||||
logger.debug(f"Processing jql '{jql}'")
|
||||
logger.debug(f"search with {jql=}, {fields=}")
|
||||
|
||||
if not jql:
|
||||
raise ValueError("Jql cannot be empty.")
|
||||
@@ -102,6 +114,7 @@ class Jira:
|
||||
fields = self.fields
|
||||
|
||||
url = f"{JIRA_ROOT}/search"
|
||||
logger.debug(f" url: {url}")
|
||||
|
||||
headers = DEFAULT_HEADERS.copy()
|
||||
headers["Content-Type"] = "application/json"
|
||||
@@ -113,15 +126,19 @@ class Jira:
|
||||
"maxResults": 500, # Does not seem to be used. It's always 100 !
|
||||
"startAt": 0
|
||||
}
|
||||
logger.debug(f" payload: {payload}")
|
||||
|
||||
result = []
|
||||
while True:
|
||||
logger.debug(f"Request startAt '{payload['startAt']}'")
|
||||
logger.debug(f" Request startAt '{payload['startAt']}'")
|
||||
response = requests.request("POST",
|
||||
url,
|
||||
data=json.dumps(payload),
|
||||
headers=headers,
|
||||
auth=self.auth)
|
||||
logger.debug(f" response: {response}")
|
||||
logger.debug(f" response.text: {response.text}")
|
||||
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception(self._format_error(response))
|
||||
@@ -130,6 +147,7 @@ class Jira:
|
||||
result += as_dict["issues"]
|
||||
|
||||
if as_dict["startAt"] + as_dict["maxResults"] >= as_dict["total"]:
|
||||
logger.debug(f" response: {response}")
|
||||
# We have retrieved at least the total number of items
|
||||
break
|
||||
|
||||
@@ -143,12 +161,18 @@ class Jira:
|
||||
:param issue_id:
|
||||
:return:
|
||||
"""
|
||||
logger.debug(f"comments with {issue_id=}")
|
||||
|
||||
url = f"{JIRA_ROOT}/issue/{issue_id}/comment"
|
||||
logger.debug(f" url: {url}")
|
||||
|
||||
response = requests.request("GET",
|
||||
url,
|
||||
headers=DEFAULT_HEADERS,
|
||||
auth=self.auth)
|
||||
logger.debug(f" response: {response}")
|
||||
logger.debug(f" response.text: {response.text}")
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception(self._format_error(response))
|
||||
|
||||
@@ -156,6 +180,34 @@ class Jira:
|
||||
result = as_dict["comments"]
|
||||
return [Expando(issue) for issue in result]
|
||||
|
||||
def versions(self, project_key):
|
||||
"""
|
||||
Returns all versions (fixVersions) defined for the given project
|
||||
:param project_key:
|
||||
:return:
|
||||
"""
|
||||
logger.debug(f"versions with {project_key=}")
|
||||
|
||||
url = f"{JIRA_ROOT}/project/{project_key}/versions"
|
||||
logger.debug(f" url: {url}")
|
||||
|
||||
response = requests.request(
|
||||
"GET",
|
||||
url,
|
||||
headers=DEFAULT_HEADERS,
|
||||
auth=self.auth
|
||||
)
|
||||
|
||||
logger.debug(f" response: {response}")
|
||||
logger.debug(f" response.text: {response.text}")
|
||||
|
||||
if response.status_code != 200:
|
||||
raise NotFound()
|
||||
|
||||
as_list = json.loads(response.text)
|
||||
return [Expando(version) for version in as_list]
|
||||
|
||||
def extract(self, jql, mappings, updates=None) -> list[dict]:
|
||||
"""
|
||||
Executes a JQL query and returns a list of dicts
|
||||
@@ -188,30 +240,6 @@ class Jira:
|
||||
row = {cvs_col: issue.get(jira_path) for jira_path, cvs_col in mappings.items() if cvs_col is not None}
|
||||
yield row
|
||||
|
||||
def get_versions(self, project_key):
|
||||
"""
|
||||
Given a project name and a version name
|
||||
returns fixVersion number in JIRA
|
||||
:param project_key:
|
||||
:param version_name:
|
||||
:return:
|
||||
"""
|
||||
|
||||
url = f"{JIRA_ROOT}/project/{project_key}/versions"
|
||||
|
||||
response = requests.request(
|
||||
"GET",
|
||||
url,
|
||||
headers=DEFAULT_HEADERS,
|
||||
auth=self.auth
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise NotFound()
|
||||
|
||||
as_list = json.loads(response.text)
|
||||
return [Expando(version) for version in as_list]
|
||||
|
||||
def get_version(self, project_key, version_name):
|
||||
"""
|
||||
Given a project name and a version name
|
||||
@@ -221,7 +249,7 @@ class Jira:
|
||||
:return:
|
||||
"""
|
||||
|
||||
for version in self.get_versions(project_key):
|
||||
for version in self.versions(project_key):
|
||||
if version.name == version_name:
|
||||
return version
|
||||
|
||||
|
||||
189
src/core/preprocessor.py
Normal file
@@ -0,0 +1,189 @@
|
||||
from arpeggio import RegExMatch, ZeroOrMore, OneOrMore, ParserPython, EOF, NoMatch
|
||||
|
||||
|
||||
class VariableParsingError(Exception):
|
||||
"""Custom exception for variable parsing errors"""
|
||||
|
||||
def __init__(self, message, position):
|
||||
self.message = message
|
||||
self.position = position
|
||||
super().__init__(f"Variable parsing error at position {position}: {message}")
|
||||
|
||||
|
||||
class VariableProcessingError(Exception):
|
||||
"""Custom exception for variable parsing errors"""
|
||||
|
||||
def __init__(self, message, position):
|
||||
self.message = message
|
||||
self.position = position
|
||||
super().__init__(f"Variable processing error at position {position}: {message}")
|
||||
|
||||
|
||||
def variable_name():
|
||||
"""Variable name: alphanumeric characters and underscores"""
|
||||
return RegExMatch(r'[a-zA-Z_][a-zA-Z0-9_]*')
|
||||
|
||||
|
||||
def property_name():
|
||||
"""Property name: same rules as variable name"""
|
||||
return RegExMatch(r'[a-zA-Z_][a-zA-Z0-9_]*')
|
||||
|
||||
|
||||
def variable_property():
|
||||
"""A property access: .property_name"""
|
||||
return ".", property_name
|
||||
|
||||
|
||||
def variable():
|
||||
"""A complete variable: $variable_name(.property)*"""
|
||||
return "$", variable_name, ZeroOrMore(variable_property)
|
||||
|
||||
|
||||
def text_char():
|
||||
"""Any character that is not the start of a variable"""
|
||||
return RegExMatch(r'[^$]')
|
||||
|
||||
|
||||
def text_segment():
|
||||
"""One or more non-variable characters"""
|
||||
return OneOrMore(text_char)
|
||||
|
||||
|
||||
def element():
|
||||
"""Either a variable or a text segment"""
|
||||
return [variable, text_segment]
|
||||
|
||||
|
||||
def expression():
|
||||
"""Complete expression: sequence of elements"""
|
||||
return ZeroOrMore(element), EOF
|
||||
|
||||
|
||||
class PlainTextPreprocessor:
|
||||
def __init__(self):
|
||||
self.parser = ParserPython(expression, debug=False, skipws=False)
|
||||
|
||||
@staticmethod
|
||||
def _post_validation(elements):
|
||||
if len(elements) < 2:
|
||||
return
|
||||
|
||||
for element, next_element in [(element, elements[i + 1]) for i, element in enumerate(elements[:-1])]:
|
||||
if element['type'] == 'variable' and next_element['type'] == 'variable':
|
||||
raise VariableParsingError("Invalid syntax.", next_element['start'])
|
||||
|
||||
@staticmethod
|
||||
def _extract_elements_from_tree(parse_tree, original_text):
|
||||
"""Extract elements with positions from the parse tree"""
|
||||
elements = []
|
||||
|
||||
def process_node(node, current_pos=0):
|
||||
nonlocal elements
|
||||
|
||||
if hasattr(node, 'rule_name'):
|
||||
if node.rule_name == 'variable':
|
||||
# Extract variable information
|
||||
var_start = node.position
|
||||
var_end = node.position_end
|
||||
var_text = original_text[var_start:var_end]
|
||||
|
||||
parts = var_text[1:].split('.') # Remove $ and split by .
|
||||
var_name = parts[0]
|
||||
properties = parts[1:] if len(parts) > 1 else []
|
||||
|
||||
elements.append({
|
||||
"type": "variable",
|
||||
"name": var_name,
|
||||
"properties": properties,
|
||||
"start": var_start,
|
||||
"end": var_end
|
||||
})
|
||||
|
||||
elif node.rule_name == 'text_segment':
|
||||
# Extract text segment
|
||||
text_start = node.position
|
||||
text_end = node.position_end
|
||||
content = original_text[text_start:text_end]
|
||||
|
||||
stripped = content.strip()
|
||||
if len(stripped) > 0 and stripped[0] == '.':
|
||||
raise VariableParsingError("Invalid syntax in property name.", text_start)
|
||||
|
||||
elements.append({
|
||||
"type": "text",
|
||||
"content": content,
|
||||
"start": text_start,
|
||||
"end": text_end
|
||||
})
|
||||
|
||||
elif node.rule_name in ('expression', 'element'):
|
||||
for child in node:
|
||||
process_node(child, current_pos)
|
||||
|
||||
# Process children
|
||||
if hasattr(node, '_tx_children') and node._tx_children:
|
||||
for child in node._tx_children:
|
||||
process_node(child, current_pos)
|
||||
|
||||
process_node(parse_tree)
|
||||
return elements
|
||||
|
||||
def parse(self, text):
|
||||
"""
|
||||
Parse text and return structure with text segments and variables with positions
|
||||
|
||||
Returns:
|
||||
[
|
||||
{"type": "text", "content": "...", "start": int, "end": int},
|
||||
{"type": "variable", "name": "...", "properties": [...], "start": int, "end": int}
|
||||
]
|
||||
"""
|
||||
if not text:
|
||||
return []
|
||||
|
||||
try:
|
||||
# Parse the text
|
||||
parse_tree = self.parser.parse(text)
|
||||
|
||||
# Extract elements from parse tree
|
||||
elements = self._extract_elements_from_tree(parse_tree, text)
|
||||
|
||||
# Extra validations
|
||||
self._post_validation(elements)
|
||||
|
||||
# Sort elements by start position
|
||||
elements.sort(key=lambda x: x['start'])
|
||||
|
||||
return elements
|
||||
|
||||
except NoMatch as e:
|
||||
# Convert Arpeggio parsing errors to our custom error
|
||||
raise VariableParsingError(f"Invalid syntax", e.position)
|
||||
except Exception as e:
|
||||
if isinstance(e, VariableParsingError):
|
||||
raise
|
||||
raise VariableParsingError(f"Parsing failed: {str(e)}", 0)
|
||||
|
||||
def preprocess(self, text, namespace):
|
||||
result = ""
|
||||
elements = self.parse(text)
|
||||
for element in elements:
|
||||
if element['type'] == 'text':
|
||||
result += element['content']
|
||||
elif element['type'] == 'variable':
|
||||
value = namespace.get(element['name'])
|
||||
if value is None:
|
||||
raise VariableProcessingError(f"Variable '{element['name']}' is not defined.", element['start'])
|
||||
|
||||
try:
|
||||
pos = element['start'] + len(element['name']) + 1 # +1 for the starting '$'
|
||||
for property_name in element['properties']:
|
||||
value = getattr(value, property_name)
|
||||
pos += len(property_name) + 1 # +1 for the dot '.'
|
||||
except AttributeError as e:
|
||||
raise VariableProcessingError(f"Invalid property '{property_name}' for variable '{element['name']}'.",
|
||||
pos) from e
|
||||
|
||||
result += str(value)
|
||||
|
||||
return result
|
||||
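A minimal usage sketch of the preprocessor above, assuming the module path used by the tests later in this diff; the text and namespace values are illustrative only:

from types import SimpleNamespace

from core.preprocessor import PlainTextPreprocessor

processor = PlainTextPreprocessor()

# parse() returns positioned elements: text segments and $-variables with properties
elements = processor.parse("issue = $item.key")
# -> [{'type': 'text', ...}, {'type': 'variable', 'name': 'item', 'properties': ['key'], ...}]

# preprocess() substitutes each variable from the namespace, following properties via getattr()
namespace = {"item": SimpleNamespace(key="PROJ-123")}
print(processor.preprocess("issue = $item.key", namespace))  # issue = PROJ-123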
@@ -98,10 +98,10 @@ class SettingsManager:
|
||||
user_id, user_email = self._get_user(session)
|
||||
return self._db_engine.save(user_id, user_email, entry, obj)
|
||||
|
||||
def load(self, session: dict, entry: str, default=NoDefault):
|
||||
def load(self, session: dict, entry: str, digest=None, default=NoDefault):
|
||||
user_id, _ = self._get_user(session)
|
||||
try:
|
||||
return self._db_engine.load(user_id, entry)
|
||||
return self._db_engine.load(user_id, entry, digest)
|
||||
except DbException:
|
||||
return default
|
||||
|
||||
@@ -128,6 +128,14 @@ class SettingsManager:
|
||||
|
||||
return self._db_engine.exists(user_id, entry)
|
||||
|
||||
def get_digest(self, session: dict, entry: str):
|
||||
user_id, _ = self._get_user(session)
|
||||
return self._db_engine.get_digest(user_id, entry)
|
||||
|
||||
def history(self, session, entry, digest=None, max_items=1000):
|
||||
user_id, _ = self._get_user(session)
|
||||
return self._db_engine.history(user_id, entry, digest, max_items)
|
||||
|
||||
def get_db_engine(self):
|
||||
return self._db_engine
|
||||
|
||||
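A hedged sketch of how the new digest-aware load(), get_digest() and history() calls could fit together; the entry name and counts are illustrative:

# Latest value plus the digests of the most recent revisions
current = settings_manager.load(session, "MyEntry", default={})
digests = settings_manager.history(session, "MyEntry", max_items=10)

# Each digest identifies one stored revision and can be passed back to load()
previous_versions = [settings_manager.load(session, "MyEntry", digest=d) for d in digests]

# Digest of the current head revision
head = settings_manager.get_digest(session, "MyEntry")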
@@ -177,7 +185,7 @@ class GenericDbManager:
|
||||
if key.startswith("_"):
|
||||
super().__setattr__(key, value)
|
||||
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not (hasattr(settings, key)):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{key}'.")
|
||||
|
||||
@@ -188,7 +196,7 @@ class GenericDbManager:
|
||||
if item.startswith("_"):
|
||||
return super().__getattribute__(item)
|
||||
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not (hasattr(settings, item)):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{item}'.")
|
||||
|
||||
@@ -250,7 +258,7 @@ class NestedSettingsManager:
|
||||
self._settings_manager.save(self._session, self._obj_entry, settings)
|
||||
|
||||
def _get_settings_and_object(self):
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, self._obj_type())
|
||||
settings = self._settings_manager.load(self._session, self._obj_entry, default=self._obj_type())
|
||||
if not hasattr(settings, self._obj_attribute):
|
||||
raise AttributeError(f"Settings '{self._obj_entry}' has no attribute '{self._obj_attribute}'.")
|
||||
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
import ast
|
||||
import base64
|
||||
import cProfile
|
||||
import functools
|
||||
import hashlib
|
||||
import importlib
|
||||
import inspect
|
||||
import pkgutil
|
||||
import re
|
||||
import time
|
||||
import types
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from io import BytesIO
|
||||
from urllib.parse import urlparse
|
||||
@@ -420,6 +424,66 @@ def split_host_port(url):
|
||||
return host, port
|
||||
|
||||
|
||||
def timed(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
start = time.perf_counter()
|
||||
result = func(*args, **kwargs)
|
||||
end = time.perf_counter()
|
||||
|
||||
# get class name
|
||||
class_name = None
|
||||
if args:
|
||||
# check the first argument to see if it's a class
|
||||
if inspect.isclass(args[0]):
|
||||
class_name = args[0].__name__ # class method
|
||||
elif hasattr(args[0], "__class__"):
|
||||
class_name = args[0].__class__.__name__ # instance method
|
||||
|
||||
if class_name:
|
||||
print(f"[PERF] {class_name}.{func.__name__} took {end - start:.4f} sec")
|
||||
else:
|
||||
print(f"[PERF] {func.__name__} took {end - start:.4f} sec")
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def profile_function(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
profiler = cProfile.Profile()
|
||||
try:
|
||||
profiler.enable()
|
||||
result = func(*args, **kwargs)
|
||||
finally:
|
||||
profiler.disable()
|
||||
|
||||
# Determine class name if any
|
||||
class_name = None
|
||||
if args:
|
||||
if inspect.isclass(args[0]):
|
||||
class_name = args[0].__name__ # class method
|
||||
elif hasattr(args[0], "__class__"):
|
||||
class_name = args[0].__class__.__name__ # instance method
|
||||
|
||||
# Compose filename with timestamp
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
if class_name:
|
||||
filename = f"{class_name}_{func.__name__}_{timestamp}.prof"
|
||||
else:
|
||||
filename = f"{func.__name__}_{timestamp}.prof"
|
||||
|
||||
# Dump stats to file
|
||||
profiler.dump_stats(filename)
|
||||
print(f"[PROFILE] Profiling data saved to {filename}")
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
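For illustration, the two decorators above could be applied to ordinary functions like this (the functions are made up for the example):

@timed
def load_dashboard():
    return {"status": "ok"}


@profile_function
def rebuild_index(items=()):
    return sorted(items)


load_dashboard()                # prints: [PERF] load_dashboard took 0.0001 sec
rebuild_index(items=[3, 1, 2])  # writes rebuild_index_<timestamp>.prof and prints its location

# Both decorators inspect the first positional argument to guess a class name,
# so plain functions are best called without positional arguments in this sketch.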
class UnreferencedNamesVisitor(ast.NodeVisitor):
|
||||
"""
|
||||
Try to find symbols that will be requested by the ast
|
||||
@@ -463,5 +527,4 @@ class UnreferencedNamesVisitor(ast.NodeVisitor):
|
||||
:rtype:
|
||||
"""
|
||||
self.names.add(node.arg)
|
||||
self.visit_selected(node, ["value"])
|
||||
|
||||
self.visit_selected(node, ["value"])
|
||||
@@ -47,4 +47,9 @@ loggers:
|
||||
AddStuffApp:
|
||||
level: INFO
|
||||
handlers: [ console ]
|
||||
propagate: False
|
||||
|
||||
Jira:
|
||||
level: DEBUG
|
||||
handlers: [ console ]
|
||||
propagate: False
|
||||
53
src/main.py
@@ -1,5 +1,4 @@
|
||||
# global layout
|
||||
import asyncio
|
||||
import logging.config
|
||||
|
||||
import yaml
|
||||
@@ -147,6 +146,8 @@ register_component("theme_controller", "components.themecontroller", "ThemeContr
|
||||
register_component("main_layout", "components.drawerlayout", "DrawerLayoutApp")
|
||||
register_component("undo_redo", "components.undo_redo", "UndoRedoApp")
|
||||
register_component("tabs", "components.tabs", "TabsApp") # before repositories
|
||||
register_component("entryselector", "components.entryselector", "EntrySelectorApp")
|
||||
register_component("jsonviewer", "components.jsonviewer", "JsonViewerApp")
|
||||
register_component("applications", "components.applications", "ApplicationsApp")
|
||||
register_component("repositories", "components.repositories", "RepositoriesApp")
|
||||
register_component("workflows", "components.workflows", "WorkflowsApp")
|
||||
@@ -211,6 +212,25 @@ app, rt = fast_app(
|
||||
pico=False,
|
||||
)
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Profiling middleware
|
||||
# -------------------------
|
||||
# @app.middleware("http")
|
||||
async def timing_middleware(request, call_next):
|
||||
import time
|
||||
start_total = time.perf_counter()
|
||||
|
||||
# Call the next middleware or route handler
|
||||
response = await call_next(request)
|
||||
|
||||
end_total = time.perf_counter()
|
||||
elapsed = end_total - start_total
|
||||
|
||||
print(f"[PERF] Total server time: {elapsed:.4f} sec - Path: {request.url.path}")
|
||||
return response
|
||||
|
||||
|
||||
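The profiling middleware above ships disabled; restoring the commented decorator, as the source hints, is enough to activate it:

@app.middleware("http")
async def timing_middleware(request, call_next):
    ...  # body as defined above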
settings_manager = SettingsManager()
|
||||
|
||||
import_settings = AdminImportSettings(settings_manager, None)
|
||||
@@ -253,6 +273,17 @@ def get(session):
|
||||
DrawerLayoutOld(pages),)
|
||||
|
||||
|
||||
@rt('/toasting')
|
||||
def get(session):
|
||||
# Normally one toast is enough, this allows us to see
|
||||
# different toast types in action.
|
||||
add_toast(session, f"Toast is being cooked", "info")
|
||||
add_toast(session, f"Toast is ready", "success")
|
||||
add_toast(session, f"Toast is getting a bit crispy", "warning")
|
||||
add_toast(session, f"Toast is burning!", "error")
|
||||
return Titled("I like toast")
|
||||
|
||||
|
||||
# Error Handling
|
||||
@app.get("/{path:path}")
|
||||
def not_found(path: str, session=None):
|
||||
@@ -275,18 +306,7 @@ def not_found(path: str, session=None):
|
||||
setup_toasts(app)
|
||||
|
||||
|
||||
@rt('/toasting')
|
||||
def get(session):
|
||||
# Normally one toast is enough, this allows us to see
|
||||
# different toast types in action.
|
||||
add_toast(session, f"Toast is being cooked", "info")
|
||||
add_toast(session, f"Toast is ready", "success")
|
||||
add_toast(session, f"Toast is getting a bit crispy", "warning")
|
||||
add_toast(session, f"Toast is burning!", "error")
|
||||
return Titled("I like toast")
|
||||
|
||||
|
||||
async def main():
|
||||
def main():
|
||||
logger.info(f" Starting FastHTML server on http://localhost:{APP_PORT}")
|
||||
serve(port=APP_PORT)
|
||||
|
||||
@@ -294,9 +314,4 @@ async def main():
|
||||
if __name__ == "__main__":
|
||||
# Start your application
|
||||
logger.info("Application starting...")
|
||||
try:
|
||||
asyncio.run(main())
|
||||
except KeyboardInterrupt:
|
||||
logger.info("\nStopping application...")
|
||||
except Exception as e:
|
||||
logger.error(f"Error: {e}")
|
||||
main()
|
||||
|
||||
@@ -1,14 +1,25 @@
|
||||
import ast
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, Generator
|
||||
|
||||
from components.admin.admin_db_manager import AdminDbManager
|
||||
from core.Expando import Expando
|
||||
from core.jira import Jira
|
||||
from core.jira import Jira, JiraRequestTypes
|
||||
from core.preprocessor import PlainTextPreprocessor
|
||||
from core.utils import UnreferencedNamesVisitor
|
||||
from utils.Datahelper import DataHelper
|
||||
|
||||
|
||||
@dataclass
|
||||
class WorkflowPayload:
|
||||
processor_name: str
|
||||
component_id: str
|
||||
item_linkage_id: int
|
||||
item: Any
|
||||
|
||||
|
||||
class DataProcessorError(Exception):
|
||||
def __init__(self, component_id, error):
|
||||
self.component_id = component_id
|
||||
@@ -92,21 +103,32 @@ class TableDataProducer(DataProducer):
|
||||
class JiraDataProducer(DataProducer):
|
||||
"""Base class for data producers that emit data from Jira."""
|
||||
|
||||
def __init__(self, session, settings_manager, component_id, request_type='issues', request='', fields=None):
|
||||
logger = logging.getLogger("DataProcessor.Producer.Jira")
|
||||
|
||||
def __init__(self, session, settings_manager, component_id, request_type='search', request='', fields=None):
|
||||
super().__init__(component_id)
|
||||
self._session = session
|
||||
self.settings_manager = settings_manager
|
||||
self.request_type = request_type
|
||||
self.request_type = request_type.value if isinstance(request_type, JiraRequestTypes) else request_type
|
||||
self.request = request
|
||||
self.fields = fields
|
||||
self.db = AdminDbManager(session, settings_manager).jira
|
||||
|
||||
def emit(self, data: Any = None) -> Generator[Any, None, None]:
|
||||
jira = Jira(self.db.user_name, self.db.api_token, fields=self.fields)
|
||||
self.logger.debug(f"Emitting data from Jira: {self.request_type} {self.request} {self.fields}")
|
||||
|
||||
preprocessor = PlainTextPreprocessor()
|
||||
preprocessed_fields = preprocessor.preprocess(self.fields, {"data": data})
|
||||
self.logger.debug(f" {preprocessed_fields=}")
|
||||
|
||||
jira = Jira(self.db.user_name, self.db.api_token, fields=preprocessed_fields)
|
||||
if not hasattr(jira, self.request_type):
|
||||
raise ValueError(f"Invalid request type: {self.request_type}")
|
||||
|
||||
yield from getattr(jira, self.request_type)(self.request)
|
||||
preprocessed_request = preprocessor.preprocess(self.request, {"data": data})
|
||||
self.logger.debug(f" {preprocessed_request=}")
|
||||
|
||||
yield from getattr(jira, self.request_type)(preprocessed_request)
|
||||
|
||||
|
||||
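A hedged sketch of wiring the producer; JiraRequestTypes.SEARCH is assumed to exist (only the enum import and the 'search' default are visible in this diff), and the JQL/fields strings are illustrative. The $data variables are resolved per incoming item by PlainTextPreprocessor before the Jira call:

producer = JiraDataProducer(
    session,
    settings_manager,
    component_id="jira_source",
    request_type=JiraRequestTypes.SEARCH,  # or simply "search"
    request="project = $data.project_key",
    fields="key,summary,status",
)

for issue in producer.emit(data=upstream_item):  # upstream_item is a placeholder
    ...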
class DefaultDataFilter(DataFilter):
|
||||
@@ -133,35 +155,56 @@ class WorkflowEngine:
|
||||
self.has_error = False
|
||||
self.global_error = None
|
||||
self.errors = {}
|
||||
self.debug = {}
|
||||
self.nb_items = -1
|
||||
|
||||
def add_processor(self, processor: DataProcessor) -> 'WorkflowEngine':
|
||||
"""Add a data processor to the pipeline."""
|
||||
self.processors.append(processor)
|
||||
return self
|
||||
|
||||
def _process_single_item(self, item: Any, processor_index: int = 0) -> Generator[Any, None, None]:
|
||||
def _process_single_item(self, item_linkage_id, item: Any, processor_index: int = 0) -> Generator[Any, None, None]:
|
||||
"""Process a single item through the remaining processors."""
|
||||
if processor_index >= len(self.processors):
|
||||
yield item
|
||||
return
|
||||
|
||||
processor = self.processors[processor_index]
|
||||
if not processor.component_id in self.debug:
|
||||
self.debug[processor.component_id] = {"input": [], "output": []}
|
||||
|
||||
self.debug[processor.component_id]["input"].append(WorkflowPayload(
|
||||
processor_name=processor.__class__.__name__,
|
||||
component_id=processor.component_id,
|
||||
item_linkage_id=item_linkage_id,
|
||||
item=item))
|
||||
|
||||
# Process the item through the current processor
|
||||
for processed_item in processor.process(item):
|
||||
self.debug[processor.component_id]["output"].append(WorkflowPayload(
|
||||
processor_name=processor.__class__.__name__,
|
||||
component_id=processor.component_id,
|
||||
item_linkage_id=item_linkage_id,
|
||||
item=processed_item))
|
||||
|
||||
# Recursively process through remaining processors
|
||||
yield from self._process_single_item(processed_item, processor_index + 1)
|
||||
yield from self._process_single_item(item_linkage_id, processed_item, processor_index + 1)
|
||||
|
||||
def run(self) -> Generator[Any, None, None]:
|
||||
"""
|
||||
Run the workflow pipeline and yield results one by one.
|
||||
The first processor must be a DataProducer.
|
||||
"""
|
||||
|
||||
self.debug.clear()
|
||||
|
||||
if not self.processors:
|
||||
self.has_error = False
|
||||
self.global_error = "No processors in the pipeline"
|
||||
self.nb_items = -1
|
||||
raise ValueError(self.global_error)
|
||||
|
||||
self.nb_items = 0
|
||||
first_processor = self.processors[0]
|
||||
|
||||
if not isinstance(first_processor, DataProducer):
|
||||
@@ -169,8 +212,16 @@ class WorkflowEngine:
|
||||
self.global_error = "First processor must be a DataProducer"
|
||||
raise ValueError(self.global_error)
|
||||
|
||||
for item in first_processor.process(None):
|
||||
yield from self._process_single_item(item, 1)
|
||||
self.debug[first_processor.component_id] = {"input": [], "output": []}
|
||||
|
||||
for item_linkage_id, item in enumerate(first_processor.process(None)):
|
||||
self.nb_items += 1
|
||||
self.debug[first_processor.component_id]["output"].append(WorkflowPayload(
|
||||
processor_name=first_processor.__class__.__name__,
|
||||
component_id=first_processor.component_id,
|
||||
item_linkage_id=item_linkage_id,
|
||||
item=item))
|
||||
yield from self._process_single_item(item_linkage_id, item, 1)
|
||||
|
||||
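A hedged sketch of how the new item_linkage_id ties the debug payloads together; the engine and processors are assumed to be wired as elsewhere in this module:

engine.add_processor(jira_producer).add_processor(my_filter)  # add_processor() chains

results = list(engine.run())

# debug is keyed by component_id; item_linkage_id links every recorded
# input/output back to the producer item it originated from.
for payload in engine.debug[my_filter.component_id]["output"]:
    print(payload.item_linkage_id, payload.processor_name, payload.item)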
def run_to_list(self) -> list[Any]:
|
||||
"""
|
||||
|
||||
@@ -642,10 +642,10 @@ def extract_table_values_new(ft, header=True):
|
||||
# first, get the header
|
||||
|
||||
if header:
|
||||
header = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
|
||||
header_element = search_elements_by_name(ft, attrs={"class": "dt2-header"}, comparison_method='contains')[0]
|
||||
header_map = {}
|
||||
res = OrderedDict()
|
||||
for row in header.children:
|
||||
for row in header_element.children:
|
||||
col_id = row.attrs["data-col"]
|
||||
title = row.attrs["data-tooltip"]
|
||||
header_map[col_id] = title
|
||||
@@ -654,9 +654,10 @@ def extract_table_values_new(ft, header=True):
|
||||
body = search_elements_by_name(ft, attrs={"class": "dt2-body"}, comparison_method='contains')[0]
|
||||
for row in body.children:
|
||||
for col in row.children:
|
||||
col_id = col.attrs["data-col"]
|
||||
cell_value = _get_cell_content_value(col)
|
||||
res[header_map[col_id]].append(cell_value)
|
||||
if hasattr(col, "attrs"):
|
||||
col_id = col.attrs["data-col"]
|
||||
cell_value = _get_cell_content_value(col)
|
||||
res[header_map[col_id]].append(cell_value)
|
||||
|
||||
return res
|
||||
|
||||
|
||||
@@ -509,3 +509,18 @@ def test_i_can_compute_footer_menu_position_when_not_enough_space(dg):
|
||||
)
|
||||
|
||||
assert matches(menu, expected)
|
||||
|
||||
|
||||
def test_the_content_of_the_cell_is_escaped(empty_dg):
|
||||
df = pd.DataFrame({
|
||||
'value': ['<div> My Content </div>'],
|
||||
'value2': ['{My Content}'],
|
||||
})
|
||||
my_dg = empty_dg.init_from_dataframe(df)
|
||||
|
||||
actual = my_dg.__ft__()
|
||||
table_content = extract_table_values_new(actual, header=True)
|
||||
|
||||
assert table_content == OrderedDict({
|
||||
'value': ['<div> My Content </div>'],
|
||||
'value2': ['{My Content}']})
|
||||
|
||||
@@ -235,3 +235,40 @@ def test_put_many_save_only_if_necessary(engine):
|
||||
|
||||
entry_content = engine.load(FAKE_USER_ID, "MyEntry")
|
||||
assert entry_content[TAG_PARENT] == [None] # Still None, nothing was saved
|
||||
|
||||
|
||||
def test_i_can_retrieve_history_using_put(engine):
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(1, "a", False))
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(2, "a", False))
|
||||
engine.put(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", "key1", DummyObj(3, "a", False))
|
||||
|
||||
history = engine.history(FAKE_USER_ID, "MyEntry")
|
||||
assert len(history) == 3
|
||||
|
||||
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
|
||||
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
|
||||
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
|
||||
|
||||
assert v0["key1"] == DummyObj(3, "a", False)
|
||||
assert v1["key1"] == DummyObj(2, "a", False)
|
||||
assert v2["key1"] == DummyObj(1, "a", False)
|
||||
|
||||
assert v2[TAG_PARENT] == [None]
|
||||
|
||||
def test_i_can_retrieve_history_using_save(engine):
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(1, "a", False)})
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(2, "a", False)})
|
||||
engine.save(FAKE_USER_ID, FAKE_USER_EMAIL, "MyEntry", {"key1" : DummyObj(3, "a", False)})
|
||||
|
||||
history = engine.history(FAKE_USER_ID, "MyEntry")
|
||||
assert len(history) == 3
|
||||
|
||||
v0 = engine.load(FAKE_USER_ID, "MyEntry", history[0])
|
||||
v1 = engine.load(FAKE_USER_ID, "MyEntry", history[1])
|
||||
v2 = engine.load(FAKE_USER_ID, "MyEntry", history[2])
|
||||
|
||||
assert v0["key1"] == DummyObj(3, "a", False)
|
||||
assert v1["key1"] == DummyObj(2, "a", False)
|
||||
assert v2["key1"] == DummyObj(1, "a", False)
|
||||
|
||||
assert v2[TAG_PARENT] == [None]
|
||||
228
tests/test_hooks.py
Normal file
@@ -0,0 +1,228 @@
|
||||
import pytest
|
||||
|
||||
from components.jsonviewer.hooks import (
|
||||
HookContext, EventType, Hook, HookManager, HookBuilder,
|
||||
WhenLongText, WhenEditable, WhenType, WhenKey, WhenPath, WhenValue,
|
||||
CompositeCondition
|
||||
)
|
||||
|
||||
|
||||
# HookContext test helper
|
||||
def create_mock_context(value=None, key=None, json_path=None, parent_node=None, node_type=None, children=None):
|
||||
"""Helper to create a mock HookContext for testing."""
|
||||
|
||||
class Node:
|
||||
def __init__(self, value, node_type=None, children=None):
|
||||
self.value = value
|
||||
self.__class__.__name__ = node_type or "MockNode"
|
||||
self.children = children or []
|
||||
|
||||
mock_node = Node(value, node_type=node_type, children=children)
|
||||
return HookContext(key=key, node=mock_node, helper=None, jsonviewer=None, json_path=json_path,
|
||||
parent_node=parent_node)
|
||||
|
||||
|
||||
# ================
|
||||
# Test Conditions
|
||||
# ================
|
||||
|
||||
@pytest.mark.parametrize("text, threshold, expected", [
|
||||
("This is a very long text." * 10, 50, True), # Long text, above threshold
|
||||
("Short text", 50, False), # Short text, below threshold
|
||||
])
|
||||
def test_i_can_detect_long_text(text, threshold, expected):
|
||||
context = create_mock_context(value=text)
|
||||
condition = WhenLongText(threshold=threshold)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("json_path, editable_paths, editable_types, node_value, is_leaf, expected", [
|
||||
("root.editable.value", ["root.editable.value"], None, "Editable value", True, True), # Editable path matches
|
||||
("root.not_editable.value", ["root.editable.value"], None, "Editable value", True, False),
|
||||
# Editable path does not match
|
||||
("root.editable.numeric", [], [int], 10, True, True), # Type is editable (int)
|
||||
("root.editable.string", [], [int], "Non-editable value", True, False) # Type is not editable
|
||||
])
|
||||
def test_i_can_detect_editable(json_path, editable_paths, editable_types, node_value, is_leaf, expected):
|
||||
context = create_mock_context(value=node_value, json_path=json_path)
|
||||
context.is_leaf_node = lambda: is_leaf # Mock is_leaf_node behavior
|
||||
condition = WhenEditable(editable_paths=editable_paths, editable_types=editable_types)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("node_value, target_type, expected", [
|
||||
(123, int, True), # Matches target type
|
||||
("String value", int, False) # Does not match target type
|
||||
])
|
||||
def test_i_can_detect_type_match(node_value, target_type, expected):
|
||||
context = create_mock_context(value=node_value)
|
||||
condition = WhenType(target_type=target_type)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("key, key_pattern, expected", [
|
||||
("target_key", "target_key", True), # Exact match
|
||||
("target_key", lambda k: k.startswith("target"), True), # Callable match
|
||||
("wrong_key", "target_key", False) # Pattern does not match
|
||||
])
|
||||
def test_i_can_match_key(key, key_pattern, expected):
|
||||
context = create_mock_context(key=key)
|
||||
condition = WhenKey(key_pattern=key_pattern)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("json_path, path_pattern, expected", [
|
||||
("root.items[0].name", r"root\.items\[\d+\]\.name", True), # Matches pattern
|
||||
("root.invalid_path", r"root\.items\[\d+\]\.name", False) # Does not match
|
||||
])
|
||||
def test_i_can_match_path(json_path, path_pattern, expected):
|
||||
context = create_mock_context(json_path=json_path)
|
||||
condition = WhenPath(path_pattern=path_pattern)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("value, target_value, predicate, expected", [
|
||||
(123, 123, None, True), # Direct match
|
||||
(123, 456, None, False), # Direct mismatch
|
||||
(150, None, lambda v: v > 100, True), # Satisfies predicate
|
||||
(50, None, lambda v: v > 100, False), # Does not satisfy predicate
|
||||
])
|
||||
def test_i_can_detect_value(value, target_value, predicate, expected):
|
||||
context = create_mock_context(value=value)
|
||||
condition = WhenValue(target_value=target_value, predicate=predicate)
|
||||
assert condition.evaluate(context) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize("value, conditions, operator, expected", [
|
||||
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=int)], "AND", True),
|
||||
# Both conditions pass (AND)
|
||||
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=str)], "AND", False),
|
||||
# One condition fails (AND)
|
||||
(200, [WhenValue(predicate=lambda v: v > 100), WhenType(target_type=str)], "OR", True),
|
||||
# At least one passes (OR)
|
||||
(50, [], "AND", True), # No conditions (default True for AND/OR)
|
||||
])
|
||||
def test_i_can_combine_conditions(value, conditions, operator, expected):
|
||||
context = create_mock_context(value=value)
|
||||
composite = CompositeCondition(conditions=conditions, operator=operator)
|
||||
assert composite.evaluate(context) == expected
|
||||
|
||||
|
||||
# ================
|
||||
# Test Hooks
|
||||
# ================
|
||||
|
||||
@pytest.mark.parametrize("event_type, actual_event, threshold, text, expected", [
|
||||
(EventType.RENDER, EventType.RENDER, 10, "Long text" * 10, True), # Event matches, meets condition
|
||||
(EventType.RENDER, EventType.CLICK, 10, "Long text" * 10, False), # Event mismatch
|
||||
])
|
||||
def test_i_can_match_hook(event_type, actual_event, threshold, text, expected):
|
||||
context = create_mock_context(value=text)
|
||||
condition = WhenLongText(threshold=threshold)
|
||||
hook = Hook(event_type=event_type, conditions=[condition], executor=lambda ctx: "Executed")
|
||||
|
||||
assert hook.matches(event_type=actual_event, context=context) == expected
|
||||
|
||||
|
||||
# ================
|
||||
# Test HookManager
|
||||
# ================
|
||||
|
||||
def test_i_can_execute_hooks_in_manager():
|
||||
hook_manager = HookManager()
|
||||
|
||||
# Add hooks
|
||||
hook1 = Hook(EventType.RENDER, conditions=[], executor=lambda ctx: "Render Executed")
|
||||
hook2 = Hook(EventType.CLICK, conditions=[], executor=lambda ctx: "Click Executed")
|
||||
|
||||
hook_manager.add_hook(hook1)
|
||||
hook_manager.add_hook(hook2)
|
||||
|
||||
context = create_mock_context()
|
||||
render_results = hook_manager.execute_hooks(event_type=EventType.RENDER, context=context)
|
||||
click_results = hook_manager.execute_hooks(event_type=EventType.CLICK, context=context)
|
||||
|
||||
assert len(render_results) == 1
|
||||
assert render_results[0] == "Render Executed"
|
||||
|
||||
assert len(click_results) == 1
|
||||
assert click_results[0] == "Click Executed"
|
||||
|
||||
|
||||
def test_i_can_clear_hooks_in_manager():
|
||||
hook_manager = HookManager()
|
||||
|
||||
hook_manager.add_hook(Hook(EventType.RENDER, conditions=[], executor=lambda ctx: "Render"))
|
||||
assert len(hook_manager.hooks) == 1
|
||||
|
||||
hook_manager.clear_hooks()
|
||||
assert len(hook_manager.hooks) == 0
|
||||
|
||||
|
||||
# ================
|
||||
# Test HookBuilder with Callable Conditions
|
||||
# ================
|
||||
|
||||
def test_i_can_use_callable_with_when_custom():
|
||||
"""Test that when_custom() accepts callable predicates"""
|
||||
|
||||
# Define a simple callable condition
|
||||
def custom_condition(context):
|
||||
return isinstance(context.get_value(), str) and context.get_value().startswith("CUSTOM_")
|
||||
|
||||
# Create hook using callable condition
|
||||
hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_custom(custom_condition)
|
||||
.execute(lambda ctx: "Custom hook executed"))
|
||||
|
||||
# Test with matching context
|
||||
matching_context = create_mock_context(value="CUSTOM_test_value")
|
||||
assert hook.matches(EventType.RENDER, matching_context) == True
|
||||
assert hook.execute(matching_context) == "Custom hook executed"
|
||||
|
||||
# Test with non-matching context
|
||||
non_matching_context = create_mock_context(value="regular_value")
|
||||
assert hook.matches(EventType.RENDER, non_matching_context) == False
|
||||
|
||||
|
||||
def test_i_can_use_lambda_with_when_custom():
|
||||
"""Test that when_custom() accepts lambda expressions"""
|
||||
|
||||
# Create hook using lambda condition
|
||||
hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_custom(lambda ctx: ctx.key == "special" and isinstance(ctx.get_value(), int) and ctx.get_value() > 100)
|
||||
.execute(lambda ctx: f"Special value: {ctx.get_value()}"))
|
||||
|
||||
# Test with matching context
|
||||
matching_context = create_mock_context(value=150, key="special")
|
||||
assert hook.matches(EventType.RENDER, matching_context) == True
|
||||
assert hook.execute(matching_context) == "Special value: 150"
|
||||
|
||||
# Test with non-matching contexts
|
||||
wrong_key_context = create_mock_context(value=150, key="normal")
|
||||
assert hook.matches(EventType.RENDER, wrong_key_context) == False
|
||||
|
||||
wrong_value_context = create_mock_context(value=50, key="special")
|
||||
assert hook.matches(EventType.RENDER, wrong_value_context) == False
|
||||
|
||||
|
||||
@pytest.mark.parametrize("value, key, json_path, expected", [
|
||||
("CUSTOM_hook_test", "test_key", "root.test", True), # Matches callable condition
|
||||
("regular_text", "test_key", "root.test", False), # Doesn't match callable condition
|
||||
(123, "test_key", "root.test", False), # Wrong type
|
||||
])
|
||||
def test_callable_condition_evaluation(value, key, json_path, expected):
|
||||
"""Test callable condition evaluation with different inputs"""
|
||||
|
||||
def custom_callable_condition(context):
|
||||
return isinstance(context.get_value(), str) and context.get_value().startswith("CUSTOM_")
|
||||
|
||||
hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_custom(custom_callable_condition)
|
||||
.execute(lambda ctx: "Executed"))
|
||||
|
||||
context = create_mock_context(value=value, key=key, json_path=json_path)
|
||||
assert hook.matches(EventType.RENDER, context) == expected
|
||||
@@ -1,12 +1,12 @@
|
||||
import pytest
|
||||
|
||||
from components.debugger.components.JsonViewer import *
|
||||
from components.jsonviewer.components.JsonViewer import *
|
||||
from components.jsonviewer.hooks import HookBuilder
|
||||
from helpers import matches, span_icon, search_elements_by_name, extract_jsonviewer_node
|
||||
|
||||
JSON_VIEWER_INSTANCE_ID = "json_viewer"
|
||||
ML_20 = "margin-left: 20px;"
|
||||
CLS_PREFIX = "mmt-jsonviewer"
|
||||
USER_ID = "user_id"
|
||||
|
||||
dn = DictNode
|
||||
ln = ListNode
|
||||
@@ -15,7 +15,7 @@ n = ValueNode
|
||||
|
||||
@pytest.fixture()
|
||||
def json_viewer(session):
|
||||
return JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, {})
|
||||
return JsonViewer(session, JSON_VIEWER_INSTANCE_ID, {})
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@@ -41,7 +41,7 @@ def jv_id(x):
|
||||
ln([{"a": [1, 2]}], jv_id(0), 0, [dn({"a": [1, 2]}, jv_id(1), 1, {"a": ln([1, 2], jv_id(2), 2, [n(1), n(2)])})]))
|
||||
])
|
||||
def test_i_can_create_node(data, expected_node):
|
||||
json_viewer_ = JsonViewer(None, JSON_VIEWER_INSTANCE_ID, None, USER_ID, data)
|
||||
json_viewer_ = JsonViewer(None, JSON_VIEWER_INSTANCE_ID, data)
|
||||
assert json_viewer_.node == expected_node
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ def test_i_can_render(json_viewer):
|
||||
(None, Span("null", cls=f"{CLS_PREFIX}-null")),
|
||||
])
|
||||
def test_i_can_render_simple_value(session, value, expected_inner):
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
actual = jsonv.__ft__()
|
||||
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
|
||||
expected = Div(
|
||||
@@ -81,7 +81,7 @@ def test_i_can_render_simple_value(session, value, expected_inner):
|
||||
|
||||
def test_i_can_render_expanded_list_node(session):
|
||||
value = [1, "hello", True]
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
# Force expansion of the node
|
||||
jsonv.set_folding_mode("expand")
|
||||
|
||||
@@ -107,7 +107,7 @@ def test_i_can_render_expanded_list_node(session):
|
||||
|
||||
def test_i_can_render_expanded_dict_node(session):
|
||||
value = {"a": 1, "b": "hello", "c": True}
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
# Force expansion of the node
|
||||
jsonv.set_folding_mode("expand")
|
||||
|
||||
@@ -133,7 +133,7 @@ def test_i_can_render_expanded_dict_node(session):
|
||||
|
||||
def test_i_can_render_expanded_list_of_dict_node(session):
|
||||
value = [{"a": 1, "b": "hello"}]
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
# Force expansion of all nodes
|
||||
jsonv.set_folding_mode("expand")
|
||||
|
||||
@@ -167,7 +167,7 @@ def test_i_can_render_expanded_list_of_dict_node(session):
|
||||
def test_render_with_collapse_folding_mode(session):
|
||||
# Create a nested structure to test collapse rendering
|
||||
value = {"a": [1, 2, 3], "b": {"x": "y", "z": True}}
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
|
||||
# Ensure folding mode is set to collapse (should be default)
|
||||
jsonv.set_folding_mode("collapse")
|
||||
@@ -195,7 +195,7 @@ def test_render_with_collapse_folding_mode(session):
|
||||
def test_render_with_specific_node_expanded_in_collapse_mode(session):
|
||||
# Create a nested structure to test mixed collapse/expand rendering
|
||||
value = {"a": [1, 2, 3], "b": {"x": "y", "z": True}}
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
|
||||
# Ensure folding mode is set to collapse
|
||||
jsonv.set_folding_mode(FoldingMode.COLLAPSE)
|
||||
@@ -230,7 +230,7 @@ def test_render_with_specific_node_expanded_in_collapse_mode(session):
|
||||
def test_multiple_folding_levels_in_collapse_mode(session):
|
||||
# Create a deeply nested structure
|
||||
value = {"level1": {"level2": {"level3": [1, 2, 3]}}}
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
|
||||
# Set folding mode to collapse
|
||||
jsonv.set_folding_mode(FoldingMode.COLLAPSE)
|
||||
@@ -262,7 +262,7 @@ def test_multiple_folding_levels_in_collapse_mode(session):
|
||||
|
||||
def test_toggle_between_folding_modes(session):
|
||||
value = {"a": [1, 2, 3], "b": {"x": "y"}}
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, value)
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, value)
|
||||
|
||||
# Start with collapse mode
|
||||
jsonv.set_folding_mode("collapse")
|
||||
@@ -271,19 +271,19 @@ def test_toggle_between_folding_modes(session):
|
||||
jsonv.set_node_folding(f"{JSON_VIEWER_INSTANCE_ID}-0", "expand")
|
||||
|
||||
# Verify node is in tracked nodes (exceptions to collapse mode)
|
||||
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._nodes_to_track
|
||||
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._folding_manager.get_nodes_to_track()
|
||||
|
||||
# Now switch to expand mode
|
||||
jsonv.set_folding_mode("expand")
|
||||
|
||||
# Tracked nodes should be cleared
|
||||
assert len(jsonv._nodes_to_track) == 0
|
||||
assert len(jsonv._folding_manager.get_nodes_to_track()) == 0
|
||||
|
||||
# Collapse specific node
|
||||
jsonv.set_node_folding(f"{JSON_VIEWER_INSTANCE_ID}-0", "collapse")
|
||||
|
||||
# Verify node is in tracked nodes (exceptions to expand mode)
|
||||
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._nodes_to_track
|
||||
assert f"{JSON_VIEWER_INSTANCE_ID}-0" in jsonv._folding_manager.get_nodes_to_track()
|
||||
|
||||
# Render and verify the output
|
||||
actual = jsonv.__ft__()
|
||||
@@ -297,34 +297,43 @@ def test_toggle_between_folding_modes(session):
|
||||
|
||||
|
||||
def test_custom_hook_rendering(session, helper):
|
||||
# Define a custom hook for testing
|
||||
def custom_predicate(key, node, h):
|
||||
return isinstance(node.value, str) and node.value == "custom_hook_test"
|
||||
|
||||
def custom_renderer(key, node, h):
|
||||
return Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class")
|
||||
|
||||
hooks = [(custom_predicate, custom_renderer)]
|
||||
|
||||
# Create JsonViewer with the custom hook
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, "custom_hook_test", hooks=hooks)
|
||||
|
||||
actual = jsonv.__ft__()
|
||||
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
|
||||
|
||||
expected = Div(
|
||||
Div(
|
||||
None,
|
||||
None,
|
||||
Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class"),
|
||||
style=ML_20),
|
||||
id=f"{jv_id('root')}")
|
||||
|
||||
assert matches(to_compare, expected)
|
||||
# Define a custom condition to check if the value is "custom_hook_test"
|
||||
def custom_condition(context):
|
||||
return isinstance(context.node.value, str) and context.node.value == "custom_hook_test"
|
||||
|
||||
# Define a custom executor to render the desired output
|
||||
def custom_renderer(context):
|
||||
return Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class")
|
||||
|
||||
# Build the hook using HookBuilder
|
||||
hook = (HookBuilder()
|
||||
.on_render()
|
||||
.when_custom(custom_condition)
|
||||
.execute(custom_renderer))
|
||||
|
||||
# Create a JsonViewer with the new hook
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, "custom_hook_test", hooks=[hook])
|
||||
|
||||
# Actual rendered output
|
||||
actual = jsonv.__ft__()
|
||||
to_compare = search_elements_by_name(actual, "div", attrs={"id": f"{jv_id('root')}"})[0]
|
||||
|
||||
# Expected rendered output
|
||||
expected = Div(
|
||||
Div(
|
||||
None,
|
||||
None,
|
||||
Span("CUSTOM_HOOK_RENDER", cls="custom-hook-class"),
|
||||
style=ML_20),
|
||||
id=f"{jv_id('root')}"
|
||||
)
|
||||
|
||||
# Assert that the actual output matches the expected output
|
||||
assert matches(to_compare, expected)
|
||||
|
||||
|
||||
def test_folding_mode_operations(session):
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, None, USER_ID, {"a": [1, 2, 3]})
|
||||
jsonv = JsonViewer(session, JSON_VIEWER_INSTANCE_ID, {"a": [1, 2, 3]})
|
||||
|
||||
# Check default folding mode
|
||||
assert jsonv.get_folding_mode() == "collapse"
|
||||
@@ -338,11 +347,11 @@ def test_folding_mode_operations(session):
|
||||
jsonv.set_node_folding(node_id, "collapse")
|
||||
|
||||
# Node should be in tracked nodes since it differs from the default mode
|
||||
assert node_id in jsonv._nodes_to_track
|
||||
assert node_id in jsonv._folding_manager.get_nodes_to_track()
|
||||
|
||||
# Restore to match default mode
|
||||
jsonv.set_node_folding(node_id, "expand")
|
||||
assert node_id not in jsonv._nodes_to_track
|
||||
assert node_id not in jsonv._folding_manager.get_nodes_to_track()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("input_value, expected_output", [
|
||||
@@ -353,7 +362,7 @@ def test_folding_mode_operations(session):
|
||||
('', '""'), # Empty string
|
||||
])
|
||||
def test_add_quotes(input_value, expected_output):
|
||||
result = JsonViewer.add_quotes(input_value)
|
||||
result = JsonViewerHelper.add_quotes(input_value)
|
||||
assert result == expected_output
|
||||
|
||||
|
||||
@@ -367,4 +376,4 @@ def test_helper_is_sha256(helper):
|
||||
assert not helper.is_sha256("a" * 63) # Too short
|
||||
assert not helper.is_sha256("a" * 65) # Too long
|
||||
assert not helper.is_sha256("g" * 64) # Invalid character
|
||||
assert not helper.is_sha256("test") # Not a hash
|
||||
assert not helper.is_sha256("test") # Not a hash
|
||||
491
tests/test_preprocessor.py
Normal file
@@ -0,0 +1,491 @@
|
||||
import pytest
|
||||
|
||||
from core.preprocessor import PlainTextPreprocessor, VariableParsingError, VariableProcessingError
|
||||
|
||||
|
||||
def test_i_can_parse_empty_text():
|
||||
"""Test that I can parse empty text input"""
|
||||
processor = PlainTextPreprocessor()
|
||||
result = processor.parse("")
|
||||
assert result == []
|
||||
|
||||
|
||||
def test_i_can_parse_text_without_variables():
|
||||
"""Test that I can parse text without any variables"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "This is just plain text with no variables"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "text",
|
||||
"content": text,
|
||||
"start": 0,
|
||||
"end": len(text)
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_simple_variable():
|
||||
"""Test that I can parse text with only a simple variable"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "variable",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 9
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_underscores():
|
||||
"""Test that I can parse variable with underscores in name"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$my_variable_name"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "my_variable_name",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 17
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_numbers():
|
||||
"""Test that I can parse variable with numbers in name"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$var123"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "var123",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 7
|
||||
}]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_properties_with_underscores_and_numbers():
|
||||
"""Test that I can parse property names with underscores and numbers"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$var._prop123.sub_prop_456"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "var",
|
||||
"properties": ["_prop123", "sub_prop_456"],
|
||||
"start": 0,
|
||||
"end": 26
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_starting_with_underscore():
|
||||
"""Test that I can parse variable name starting with underscore"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$_private_var"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "_private_var",
|
||||
"properties": [],
|
||||
"start": 0,
|
||||
"end": 13
|
||||
}
|
||||
]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_single_property():
|
||||
"""Test that I can parse variable with one property"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable.prop"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "variable",
|
||||
"properties": ["prop"],
|
||||
"start": 0,
|
||||
"end": 14
|
||||
}]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_variable_with_multiple_properties():
|
||||
"""Test that I can parse variable with multiple properties"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable.prop.subprop.deep"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [{
|
||||
"type": "variable",
|
||||
"name": "variable",
|
||||
"properties": ["prop", "subprop", "deep"],
|
||||
"start": 0,
|
||||
"end": 27
|
||||
}]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_text_with_variable_in_middle():
|
||||
"""Test that I can parse text with variable in the middle"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "project > $project_id and more"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [
|
||||
{
|
||||
"type": "text",
|
||||
"content": "project > ",
|
||||
"start": 0,
|
||||
"end": 10
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "project_id",
|
||||
"properties": [],
|
||||
"start": 10,
|
||||
"end": 21
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": " and more",
|
||||
"start": 21,
|
||||
"end": 30
|
||||
}
|
||||
]
|
||||
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_multiple_variables():
|
||||
"""Test that I can parse text with multiple variables"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "value == $variable.prop and $other_var"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [
|
||||
{
|
||||
"type": "text",
|
||||
"content": "value == ",
|
||||
"start": 0,
|
||||
"end": 9
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "variable",
|
||||
"properties": ["prop"],
|
||||
"start": 9,
|
||||
"end": 23
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": " and ",
|
||||
"start": 23,
|
||||
"end": 28
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "other_var",
|
||||
"properties": [],
|
||||
"start": 28,
|
||||
"end": 38
|
||||
}
|
||||
]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_preserve_all_whitespace():
|
||||
"""Test that I can preserve all whitespace including tabs and newlines"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = " $var \t\n $other.prop "
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [
|
||||
{
|
||||
"type": "text",
|
||||
"content": " ",
|
||||
"start": 0,
|
||||
"end": 2
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "var",
|
||||
"properties": [],
|
||||
"start": 2,
|
||||
"end": 6
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": " \t\n ",
|
||||
"start": 6,
|
||||
"end": 12
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "other",
|
||||
"properties": ["prop"],
|
||||
"start": 12,
|
||||
"end": 23
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": " ",
|
||||
"start": 23,
|
||||
"end": 25
|
||||
}
|
||||
]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_text_with_special_characters():
|
||||
"""Test that I can parse text with special characters"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "Hello $user! @#%^&*()+={}[]|\\:;\"'<>?,./~`"
|
||||
result = processor.parse(text)
|
||||
|
||||
expected = [
|
||||
{
|
||||
"type": "text",
|
||||
"content": "Hello ",
|
||||
"start": 0,
|
||||
"end": 6
|
||||
},
|
||||
{
|
||||
"type": "variable",
|
||||
"name": "user",
|
||||
"properties": [],
|
||||
"start": 6,
|
||||
"end": 11
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"content": "! @#%^&*()+={}[]|\\:;\"'<>?,./~`",
|
||||
"start": 11,
|
||||
"end": 41
|
||||
}
|
||||
]
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_i_can_parse_complex_expression():
|
||||
"""Test that I can parse complex but valid expression"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "if ($user.profile.age > 18 && $user.status == 'active') { $action.execute(); }"
|
||||
result = processor.parse(text)
|
||||
|
||||
# Should parse successfully and find all variables
|
||||
variables = [elem for elem in result if elem["type"] == "variable"]
|
||||
assert len(variables) == 3
|
||||
|
||||
# Check variable details
|
||||
assert variables[0]["name"] == "user"
|
||||
assert variables[0]["properties"] == ["profile", "age"]
|
||||
|
||||
assert variables[1]["name"] == "user"
|
||||
assert variables[1]["properties"] == ["status"]
|
||||
|
||||
assert variables[2]["name"] == "action"
|
||||
assert variables[2]["properties"] == ["execute"]
|
||||
|
||||
|
||||
def test_positions_are_accurate():
|
||||
"""Test that element positions are accurate"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "abc$var123*def"
|
||||
result = processor.parse(text)
|
||||
|
||||
assert len(result) == 3
|
||||
|
||||
# Text before
|
||||
assert result[0]["start"] == 0
|
||||
assert result[0]["end"] == 3
|
||||
assert result[0]["content"] == "abc"
|
||||
|
||||
# Variable
|
||||
assert result[1]["start"] == 3
|
||||
assert result[1]["end"] == 10
|
||||
assert result[1]["name"] == "var123"
|
||||
|
||||
# Text after
|
||||
assert result[2]["start"] == 10
|
||||
assert result[2]["end"] == 14
|
||||
assert result[2]["content"] == "*def"
|
||||
|
||||
|
||||
# Error cases
|
||||
def test_i_cannot_parse_dollar_alone_at_end():
|
||||
"""Test that I cannot parse $ at the end of text"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "Hello $"
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 7
|
||||
assert "Invalid syntax" in str(exc_info.value)
|
||||
# assert "Variable name missing after '$'" in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_dollar_alone_in_middle():
|
||||
"""Test that I cannot parse $ alone in middle of text"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "Hello $ world"
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 7
|
||||
assert "Invalid syntax" in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_dot_immediately_after_dollar():
|
||||
"""Test that I cannot parse $.property (dot immediately after $)"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$.property"
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 1
|
||||
assert "Invalid syntax" in str(exc_info.value)
|
||||
# assert "Variable name missing before '.'" in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_variable_ending_with_dot():
|
||||
"""Test that I cannot parse $variable. (dot at the end)"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable."
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 9
|
||||
assert "Invalid syntax in property name." in str(exc_info.value)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("text", ["$variable. prop", "$variable .prop", "$variable . prop"])
|
||||
def test_i_cannot_parse_variable_when_space_in_variable_name(text):
|
||||
"""Test that I cannot parse $variable. (dot at the end)"""
|
||||
processor = PlainTextPreprocessor()
|
||||
# text = "$variable. "
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 9
|
||||
assert "Invalid syntax in property name." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_variable_with_empty_property():
|
||||
"""Test that I cannot parse $variable..property (empty property between dots)"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable..property"
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 9
|
||||
assert "Invalid syntax in property name." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_variable_ending_with_multiple_dots():
|
||||
"""Test that I cannot parse $variable... (multiple dots at end)"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$variable..."
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 9
|
||||
assert "Invalid syntax in property name." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_parse_when_consecutive_variables():
|
||||
"""Test that I can parse consecutive variables without text between"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$var1$var2"
|
||||
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
assert exc_info.value.position == 5
|
||||
assert "Invalid syntax." in str(exc_info.value)
|
||||
|
||||
|
||||
def test_first_error_is_reported_with_multiple_errors():
|
||||
"""Test that first error is reported when multiple $ errors exist"""
|
||||
processor = PlainTextPreprocessor()
|
||||
text = "$ and $. and $var."
|
||||
with pytest.raises(VariableParsingError) as exc_info:
|
||||
processor.parse(text)
|
||||
|
||||
# Should report the first error ($ alone)
|
||||
assert exc_info.value.position == 1
|
||||
|
||||
|
||||
def test_i_can_preprocess_simple_variable():
|
||||
"""Test preprocessing text with a simple variable"""
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {"name": "John"}
|
||||
result = processor.preprocess("Hello $name!", namespace)
|
||||
assert result == "Hello John!"
|
||||
|
||||
|
||||
def test_i_can_preprocess_with_properties():
|
||||
"""Test preprocessing text with variable properties"""
|
||||
|
||||
class User:
|
||||
def __init__(self):
|
||||
self.profile = type('Profile', (), {'age': 25})()
|
||||
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {"user": User()}
|
||||
result = processor.preprocess("Age: $user.profile.age", namespace)
|
||||
assert result == "Age: 25"
|
||||
|
||||
|
||||
def test_i_can_preprocess_multiple_variables():
|
||||
"""Test preprocessing text with multiple variables"""
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {"first": "Hello", "second": "World"}
|
||||
result = processor.preprocess("$first $second!", namespace)
|
||||
assert result == "Hello World!"
|
||||
|
||||
|
||||
def test_i_can_preprocess_empty_text():
|
||||
"""Test preprocessing empty text"""
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {}
|
||||
result = processor.preprocess("", namespace)
|
||||
assert result == ""
|
||||
|
||||
|
||||
def test_i_cannot_preprocess_undefined_variable():
|
||||
"""Test preprocessing with undefined variable raises error"""
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {}
|
||||
with pytest.raises(VariableProcessingError) as exc_info:
|
||||
processor.preprocess("$undefined_var", namespace)
|
||||
assert "Variable 'undefined_var' is not defined" in str(exc_info.value)
|
||||
|
||||
|
||||
def test_i_cannot_preprocess_invalid_property():
|
||||
"""Test preprocessing with invalid property access"""
|
||||
processor = PlainTextPreprocessor()
|
||||
namespace = {"obj": object()}
|
||||
with pytest.raises(VariableProcessingError) as exc_info:
|
||||
processor.preprocess("some text $obj.invalid_prop", namespace)
|
||||
|
||||
assert "Invalid property 'invalid_prop' for variable 'obj'" in str(exc_info.value)
|
||||
assert exc_info.value.position == 14
|
||||
@@ -1,33 +1,66 @@
|
||||
import os
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
from fasthtml.components import Div
|
||||
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo, CommandHistory
|
||||
from components.undo_redo.components.UndoRedo import UndoRedo
|
||||
from components.undo_redo.constants import UndoRedoAttrs
|
||||
from core.dbengine import DbEngine
|
||||
from core.settings_management import SettingsManager, MemoryDbEngine
|
||||
from helpers import matches, div_icon, Contains, DoesNotContain
|
||||
from my_mocks import tabs_manager
|
||||
|
||||
DB_ENGINE_ROOT = "undo_redo_test_db"
|
||||
TEST_DB_ENTRY = "TestDbEntry"
|
||||
TEST_DB_KEY = "TestDbKey"
|
||||
|
||||
class UndoableCommand(CommandHistory):
|
||||
def __init__(self, old_value=0, new_value=0):
|
||||
super().__init__("Set new value", lambda value: f"Setting new value to {value}", None)
|
||||
self.old_value = old_value
|
||||
self.new_value = new_value
|
||||
|
||||
class TestCommand:
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def undo(self):
|
||||
return Div(self.old_value, hx_swap_oob="true")
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, TestCommand):
|
||||
return False
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def redo(self):
|
||||
return Div(self.new_value, hx_swap_oob="true")
|
||||
def __hash__(self):
|
||||
return hash(self.value)
|
||||
|
||||
|
||||
@pytest.fixture()
def engine(session):
    if os.path.exists(DB_ENGINE_ROOT):
        shutil.rmtree(DB_ENGINE_ROOT)

    engine = DbEngine(DB_ENGINE_ROOT)
    engine.init(session["user_id"])

    yield engine

    shutil.rmtree(DB_ENGINE_ROOT)


@pytest.fixture()
def settings_manager(engine):
    return SettingsManager(engine=engine)


@pytest.fixture
def undo_redo(session, tabs_manager):
def undo_redo(session, tabs_manager, settings_manager):
    return UndoRedo(session,
                    UndoRedo.create_component_id(session),
                    settings_manager=SettingsManager(engine=MemoryDbEngine()),
                    settings_manager=settings_manager,
                    tabs_manager=tabs_manager)


def init_command(session, settings_manager, undo_redo, value, on_undo=None):
    settings_manager.save(session, TEST_DB_ENTRY, {TEST_DB_KEY: TestCommand(value)})
    undo_redo.snapshot(UndoRedoAttrs(f"Set value to {value}", on_undo=on_undo), TEST_DB_ENTRY, TEST_DB_KEY)


def test_i_can_render(undo_redo):
    actual = undo_redo.__ft__()
    expected = Div(
@@ -39,13 +72,13 @@ def test_i_can_render(undo_redo):
    assert matches(actual, expected)


def test_i_can_render_when_undoing_and_redoing(undo_redo):
    undo_redo.push(UndoableCommand(0, 1))
    undo_redo.push(UndoableCommand(1, 2))
def test_i_can_render_when_undoing_and_redoing(session, settings_manager, undo_redo):
    init_command(session, settings_manager, undo_redo, "1")
    init_command(session, settings_manager, undo_redo, "2")

    actual = undo_redo.__ft__()
    expected = Div(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set new value'."),
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 2'."),
        Div(div_icon("redo", cls=Contains("mmt-btn-disabled")), data_tooltip="Nothing to redo."),
        id=undo_redo.get_id(),
    )
@@ -54,8 +87,8 @@ def test_i_can_render_when_undoing_and_redoing(undo_redo):
    undo_redo.undo()  # The command is now undone. We can redo it and undo the first command.
    actual = undo_redo.__ft__()
    expected = Div(
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set new value'."),
        Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Redo 'Set new value'."),
        Div(div_icon("undo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Undo 'Set value to 1'."),
        Div(div_icon("redo", cls=DoesNotContain("mmt-btn-disabled")), data_tooltip="Redo 'Set value to 2'."),
        id=undo_redo.get_id(),
    )
    assert matches(actual, expected)
@@ -88,25 +121,48 @@ def test_i_can_render_when_undoing_and_redoing(undo_redo):
    assert matches(actual, expected)


def test_i_can_undo_and_redo(undo_redo):
    undo_redo.push(UndoableCommand(0, 1))
    undo_redo.push(UndoableCommand(1, 2))
def test_values_are_correctly_reset(session, settings_manager, undo_redo):
    # checks that the values are correctly returned
    # Only checks that hx_swap_oob="true" is automatically put when id is present in the return

    def on_undo():
        current = settings_manager.get(session, TEST_DB_ENTRY, TEST_DB_KEY)
        return Div(current.value, id='an_id')

    init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)
    init_command(session, settings_manager, undo_redo, "2", on_undo=on_undo)

    self, res = undo_redo.undo()
    expected = Div(1, hx_swap_oob="true")
    expected = Div("1", id='an_id', hx_swap_oob="true")
    assert matches(res, expected)

    self, res = undo_redo.redo()
    expected = Div(2, hx_swap_oob="true")
    expected = Div("2", id='an_id', hx_swap_oob="true")
    assert matches(res, expected)

def test_history_is_rewritten_when_pushing_a_command(undo_redo):
    undo_redo.push(UndoableCommand(0, 1))
    undo_redo.push(UndoableCommand(1, 2))
    undo_redo.push(UndoableCommand(2, 3))

def test_i_can_manage_when_the_entry_was_not_present(session, settings_manager, undo_redo):
    def on_undo():
        snapshot = settings_manager.load(session, TEST_DB_ENTRY)
        if TEST_DB_KEY in snapshot:
            return Div(snapshot[TEST_DB_KEY].value, id='an_id')
        else:
            return Div("**Not Found**", id='an_id')

    init_command(session, settings_manager, undo_redo, "1", on_undo=on_undo)

    self, res = undo_redo.undo()
    expected = Div("**Not Found**", id='an_id', hx_swap_oob="true")
    assert matches(res, expected)


def test_history_is_rewritten_when_pushing_a_command_after_undo(session, settings_manager, undo_redo):
    init_command(session, settings_manager, undo_redo, "1")
    init_command(session, settings_manager, undo_redo, "2")
    init_command(session, settings_manager, undo_redo, "3")

    undo_redo.undo()
    undo_redo.undo()
    undo_redo.push(UndoableCommand(1, 5))
    init_command(session, settings_manager, undo_redo, "5")

    assert len(undo_redo.history) == 2
    assert len(undo_redo.history) == 3  # do not forget that history always has a default command with digest = None
@@ -1,11 +1,15 @@
from unittest.mock import MagicMock

import pytest
from fastcore.basics import NotStr
from fasthtml.components import *
from fasthtml.xtend import Script

from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.WorkflowDesigner import WorkflowDesigner, COMPONENT_TYPES
from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowsDesignerSettings, WorkflowComponent, Connection
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, Contains
from my_mocks import tabs_manager
@@ -13,6 +17,27 @@ from my_mocks import tabs_manager
TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"


@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    # Create a mock UndoRedo instance
    undo_redo = MagicMock(spec=UndoRedo)

    # Store original get method
    original_get = InstanceManager.get

    def mock_get(sess, instance_id, *args, **kwargs):
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    # Replace get method with our mock
    InstanceManager.get = mock_get

    yield undo_redo

    # Restore original get method after test
    InstanceManager.get = original_get
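The same swap-and-restore can be written with pytest's built-in monkeypatch fixture, which undoes the patch at teardown even when a test fails part-way. This is a possible variant, not what the change above does:

@pytest.fixture(autouse=True)
def mock_undo_redo(session, monkeypatch):
    # Mock UndoRedo instance returned whenever its component id is requested
    undo_redo = MagicMock(spec=UndoRedo)
    original_get = InstanceManager.get

    def mock_get(sess, instance_id, *args, **kwargs):
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    # monkeypatch restores InstanceManager.get automatically after the test
    monkeypatch.setattr(InstanceManager, "get", mock_get)
    return undo_redo

Since the identical fixture is repeated verbatim in the three test modules touched by this change, hoisting it into a shared conftest.py fixture would be another way to remove the duplication.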
@pytest.fixture
def designer(session, tabs_manager):
    return WorkflowDesigner(session=session, _id=TEST_WORKFLOW_DESIGNER_ID,
@@ -72,7 +97,7 @@ def test_i_can_render_no_component(designer):
    actual = designer.__ft__()
    expected = Div(
        H1("Workflow Name"),
        P("Drag components from the toolbox to the canvas to create your workflow."),
        # P("Drag components from the toolbox to the canvas to create your workflow."),
        Div(id=f"t_{designer.get_id()}"),  # media + error message
        Div(id=f"d_{designer.get_id()}"),  # designer container
        Div(cls="wkf-splitter"),

@@ -73,6 +73,7 @@ def test_run_simple_workflow(engine):
    assert result == [1, 2, 3]


@pytest.mark.skip(reason="Not yet implemented")
def test_process_single_item(engine):
    """Test the internal _process_single_item method."""
    mock_processor = MagicMock(spec=DataProcessor)
78
tests/test_workflow_engine_integration.py
Normal file
@@ -0,0 +1,78 @@
import pytest
from unittest.mock import Mock, patch
from core.Expando import Expando

from core.jira import JiraRequestTypes
from core.settings_management import SettingsManager, MemoryDbEngine
from workflow.engine import JiraDataProducer, TableDataProducer

JIRA_IMPORT_PATH = "workflow.engine.Jira"


@pytest.fixture
def mock_jira_search_1():
    with patch(JIRA_IMPORT_PATH) as mock_jira_class:
        mock_jira_instance = Mock()
        mock_jira_instance.search.return_value = [
            Expando({
                "key": "TEST-1",
                "fields": {
                    "summary": "Test Issue",
                    "status": {"name": "Open"},
                    "assignee": {"displayName": "Test User"}
                }
            })
        ]
        mock_jira_class.return_value = mock_jira_instance

        yield mock_jira_instance  # This allows us to access the mock instance in our tests


@pytest.fixture
def mock_jira_error():
    with patch(JIRA_IMPORT_PATH) as mock_jira_class:
        mock_jira_instance = Mock()
        mock_jira_instance.search.side_effect = Exception("Jira API Error")
        mock_jira_class.return_value = mock_jira_instance

        yield mock_jira_instance


def get_jira_patch(jp: JiraDataProducer):
    # Create and configure the mock instance
    mock_jira_instance = Mock()
    if jp.request_type == JiraRequestTypes.Search.value:
        mock_jira_instance.search.return_value = [
            Expando({
                "key": "TEST-1",
                "fields": {
                    "summary": "Test Issue",
                    "status": {"name": "Open"},
                    "assignee": {"displayName": "Test User"}
                }
            })
        ]
    else:
        raise ValueError("Hello Kodjo. Unsupported request type !")

    return patch(JIRA_IMPORT_PATH, return_value=mock_jira_instance)


def jira_producer(session, request_type, request, fields=None):
    return JiraDataProducer(session,
                            SettingsManager(MemoryDbEngine()),
                            "component_id",
                            request_type=request_type,
                            request=request,
                            fields=fields)


def test_i_can_produce_jira_search(session):
    data = {}
    jp = jira_producer(session, JiraRequestTypes.Search, "project=key1")

    with get_jira_patch(jp):
        res = list(jp.process(data))

    assert len(res) == 1
    assert res[0].key == "TEST-1"
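The mock_jira_error fixture defined above is not exercised in the visible hunk. A hypothetical error-path test (the test name is invented here, and it assumes JiraDataProducer.process instantiates workflow.engine.Jira while the fixture's patch is active and lets the search exception surface) might look like:

def test_i_cannot_produce_when_jira_fails(session, mock_jira_error):
    jp = jira_producer(session, JiraRequestTypes.Search, "project=key1")

    # Jira.search is patched to raise Exception("Jira API Error"); we only assert that
    # an exception reaches the caller, since any wrapping (e.g. in DataProcessorError)
    # is an implementation detail not visible in this diff.
    with pytest.raises(Exception):
        list(jp.process({}))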
@@ -4,10 +4,12 @@ import pandas as pd
import pytest
from pandas.testing import assert_frame_equal

from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.WorkflowDesigner import COMPONENT_TYPES, WorkflowDesigner
from components.workflows.components.WorkflowPlayer import WorkflowPlayer, WorkflowsPlayerError
from components.workflows.constants import ProcessorTypes
from components.workflows.db_management import WorkflowComponent, Connection, ComponentState, WorkflowsDesignerSettings
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from my_mocks import tabs_manager
from workflow.engine import DataProcessorError
@@ -16,6 +18,27 @@ TEST_WORKFLOW_DESIGNER_ID = "workflow_designer_id"
TEST_WORKFLOW_PLAYER_ID = "workflow_player_id"


@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    # Create a mock UndoRedo instance
    undo_redo = MagicMock(spec=UndoRedo)

    # Store original get method
    original_get = InstanceManager.get

    def mock_get(sess, instance_id, *args, **kwargs):
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    # Replace get method with our mock
    InstanceManager.get = mock_get

    yield undo_redo

    # Restore original get method after test
    InstanceManager.get = original_get

@pytest.fixture
def settings_manager():
    return SettingsManager(MemoryDbEngine())
@@ -1,8 +1,12 @@
from unittest.mock import MagicMock

import pytest
from fasthtml.components import *

from components.form.components.MyForm import FormField, MyForm
from components.undo_redo.components.UndoRedo import UndoRedo
from components.workflows.components.Workflows import Workflows
from core.instance_manager import InstanceManager
from core.settings_management import SettingsManager, MemoryDbEngine
from helpers import matches, div_icon, search_elements_by_name, Contains
from my_mocks import tabs_manager
@@ -18,6 +22,28 @@ def workflows(session, tabs_manager):
                     tabs_manager=tabs_manager)


@pytest.fixture(autouse=True)
def mock_undo_redo(session):
    # Create a mock UndoRedo instance
    undo_redo = MagicMock(spec=UndoRedo)

    # Store original get method
    original_get = InstanceManager.get

    def mock_get(sess, instance_id, *args, **kwargs):
        if instance_id == UndoRedo.create_component_id(sess):
            return undo_redo
        return original_get(sess, instance_id, *args, **kwargs)

    # Replace get method with our mock
    InstanceManager.get = mock_get

    yield undo_redo

    # Restore original get method after test
    InstanceManager.get = original_get


def test_render_no_workflow(workflows):
    actual = workflows.__ft__()
    expected = Div(