Working implementation of DefaultDataPresenter
This commit is contained in:
@@ -531,7 +531,8 @@ class WorkflowDesigner(BaseComponent):
|
|||||||
value=component.properties.get("columns", ""),
|
value=component.properties.get("columns", ""),
|
||||||
placeholder="Columns to display, separated by comma",
|
placeholder="Columns to display, separated by comma",
|
||||||
cls="input w-full"),
|
cls="input w-full"),
|
||||||
P("Comma separated list of columns to display. Use * to display all columns, source=dest to rename columns."),
|
P("Comma separated list of columns to display. Use '*' to display all columns, 'source=dest' to rename columns."),
|
||||||
|
P("Use 'parent.*=*' to display all columns from object 'parent' and rename them removing the 'parent' prefix."),
|
||||||
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
cls="fieldset bg-base-200 border-base-300 rounded-box border p-4"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -13,7 +13,8 @@ from components.workflows.db_management import WorkflowComponentRuntimeState, \
|
|||||||
WorkflowComponent, ComponentState
|
WorkflowComponent, ComponentState
|
||||||
from core.instance_manager import InstanceManager
|
from core.instance_manager import InstanceManager
|
||||||
from core.utils import get_unique_id, make_safe_id
|
from core.utils import get_unique_id, make_safe_id
|
||||||
from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataPresenter, DefaultDataFilter, JiraDataProducer
|
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||||
|
from workflow.engine import WorkflowEngine, TableDataProducer, DefaultDataFilter, JiraDataProducer
|
||||||
|
|
||||||
grid_settings = DataGridSettings(
|
grid_settings = DataGridSettings(
|
||||||
header_visible=True,
|
header_visible=True,
|
||||||
|
|||||||
103
src/workflow/DefaultDataPresenter.py
Normal file
103
src/workflow/DefaultDataPresenter.py
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from core.Expando import Expando
|
||||||
|
from workflow.engine import DataPresenter
|
||||||
|
|
||||||
|
|
||||||
|
class DefaultDataPresenter(DataPresenter):
    """Present a data item as an Expando restricted to the mapped fields.

    The mappings definition is a comma-separated list of entries:

    - ``field``            keep ``field`` under its own name
    - ``field=renamed``    keep ``field`` under the name ``renamed``
    - ``*``                keep every top-level field
    - ``obj.*``            keep every field of sub-object ``obj`` under its
                           dotted path (``obj.field``)
    - ``obj.*=*``          keep every field of sub-object ``obj`` under its
                           bare name (the ``obj.`` prefix removed); two such
                           renames producing the same field name raise
                           ValueError

    Later entries override earlier ones when they target the same source key.
    """

    def __init__(self, component_id: str, mappings_definition: str):
        """component_id is forwarded to the DataPresenter base class;
        mappings_definition is the comma-separated entry list (see class doc)."""
        super().__init__(component_id)
        self._mappings_definition = mappings_definition
        # Individual mapping entries, whitespace-trimmed.
        self._split_definitions = [definition.strip() for definition in mappings_definition.split(",")]

        # Without a wildcard the mapping table is independent of the data and
        # can be built once; wildcard entries depend on the actual fields of
        # each item, so they are expanded per item in present().
        if "*" not in mappings_definition:
            self._static_mappings = self._get_static_mappings()
        else:
            self._static_mappings = None

    def present(self, data: Any) -> Any:
        """Return an Expando built from the mapped fields of ``data``.

        Raises ValueError for a malformed definition (empty entry between
        commas) or for wildcard errors detected against this item.  Note the
        definition is (re-)validated on every call, so errors surface at
        presentation time, not at construction time.
        """
        self._validate_mappings_definition()

        if self._static_mappings:
            return Expando(data.to_dict(self._static_mappings))

        # Wildcards present: expand them against this item's actual fields.
        dynamic_mappings = self._get_dynamic_mappings(data)
        return Expando(data.to_dict(dynamic_mappings))

    def _get_dynamic_mappings(self, data):
        """Expand wildcard entries against ``data`` into a source->dest dict."""

        # Records which definition entry produced each renamed field, so a
        # second wildcard rename landing on the same name can be reported.
        manage_conflicts = {}

        mappings = {}
        for mapping in self._split_definitions:
            if "=" in mapping:
                key, value = [s.strip() for s in mapping.split('=', 1)]
                if key == "*":
                    # all fields; only "*=*" is accepted as a wildcard rename
                    if value != "*":
                        raise ValueError("Only '*' is accepted when renaming wildcard.")
                    for key in data.as_dict().keys():
                        if key in manage_conflicts:
                            raise ValueError(f"Collision detected for field '{key}'. It is mapped from both '{manage_conflicts[key]}' and '{mapping}'.")
                        manage_conflicts[key] = mapping
                        mappings[key] = key
                elif key.endswith(".*"):
                    # all fields in a sub-object, renamed to their bare names
                    if value != "*":
                        raise ValueError("Only '*' is accepted when renaming wildcard.")
                    obj_path = key[:-2]
                    sub_obj = data.get(obj_path)
                    if isinstance(sub_obj, dict):
                        for sub_field in sub_obj:
                            if sub_field in manage_conflicts:
                                raise ValueError(
                                    f"Collision detected for field '{sub_field}'. It is mapped from both '{manage_conflicts[sub_field]}' and '{mapping}'.")
                            manage_conflicts[sub_field] = mapping
                            mappings[f"{obj_path}.{sub_field}"] = sub_field
                    else:
                        raise ValueError(f"Field '{obj_path}' is not an object.")
                else:
                    # plain rename; a later entry overrides an earlier one
                    mappings[key.strip()] = value.strip()

            else:
                if mapping == "*":
                    # all top-level fields, kept under their own names
                    for key in data.as_dict().keys():
                        mappings[key] = key
                elif mapping.endswith(".*"):
                    # all fields in a sub-object, kept under their dotted path
                    obj_path = mapping[:-2]
                    sub_obj = data.get(obj_path)
                    if isinstance(sub_obj, dict):
                        for sub_field in sub_obj:
                            mappings[f"{obj_path}.{sub_field}"] = f"{obj_path}.{sub_field}"
                    else:
                        raise ValueError(f"Field '{obj_path}' is not an object.")
                else:
                    mappings[mapping] = mapping

        return mappings

    def _get_static_mappings(self):
        """Build the source->dest dict for a definition without wildcards."""
        mappings = {}
        for mapping in self._split_definitions:
            if "=" in mapping:
                key, value = [s.strip() for s in mapping.split('=', 1)]
                mappings[key] = value
            else:
                mappings[mapping] = mapping

        return mappings

    def _validate_mappings_definition(self):
        """Raise ValueError when two commas are separated only by whitespace.

        NOTE(review): a leading or trailing comma is not detected here —
        confirm whether that should also be rejected.
        """
        last_char_was_comma = False
        for i, char in enumerate(self._mappings_definition):
            if char == ',':
                if last_char_was_comma:
                    raise ValueError(f"Invalid mappings definition: Error found at index {i}")
                last_char_was_comma = True
            elif not char.isspace():
                last_char_was_comma = False
@@ -105,32 +105,6 @@ class JiraDataProducer(DataProducer):
|
|||||||
yield from jira.jql(self.jira_query)
|
yield from jira.jql(self.jira_query)
|
||||||
|
|
||||||
|
|
||||||
class DefaultDataPresenter(DataPresenter):
    """Map/rename columns of a data item.

    ``columns_as_str`` is a comma-separated list of ``col`` or
    ``col=renamed`` entries.  An empty string or ``*`` means no selection:
    present() passes the data through unchanged.
    """

    def __init__(self, component_id: str, columns_as_str: str):
        super().__init__(component_id)
        if not columns_as_str or columns_as_str == "*":
            # No column selection: present() returns the item untouched.
            self.mappings = None
        else:
            self.mappings = {}
            temp_mappings = [col.strip() for col in columns_as_str.split(",")]
            for mapping in temp_mappings:
                if "=" in mapping:
                    # NOTE(review): split without maxsplit raises ValueError
                    # if an entry contains more than one '='.
                    key, value = mapping.split("=")
                    self.mappings[key] = value
                else:
                    self.mappings[mapping] = mapping

    def present(self, data: Any) -> Any:
        """Return ``data`` unchanged, or an Expando restricted to the mapped columns."""
        if self.mappings is None:
            return data

        return Expando(data.to_dict(self.mappings))
|
|
||||||
class DefaultDataFilter(DataFilter):
|
class DefaultDataFilter(DataFilter):
|
||||||
def __init__(self, component_id: str, filter_expression: str):
|
def __init__(self, component_id: str, filter_expression: str):
|
||||||
super().__init__(component_id)
|
super().__init__(component_id)
|
||||||
@@ -173,7 +147,6 @@ class WorkflowEngine:
|
|||||||
for processed_item in processor.process(item):
|
for processed_item in processor.process(item):
|
||||||
# Recursively process through remaining processors
|
# Recursively process through remaining processors
|
||||||
yield from self._process_single_item(processed_item, processor_index + 1)
|
yield from self._process_single_item(processed_item, processor_index + 1)
|
||||||
|
|
||||||
|
|
||||||
def run(self) -> Generator[Any, None, None]:
|
def run(self) -> Generator[Any, None, None]:
|
||||||
"""
|
"""
|
||||||
|
|||||||
186
tests/test_default_data_presenter.py
Normal file
186
tests/test_default_data_presenter.py
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
from core.Expando import Expando
|
||||||
|
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||||
|
|
||||||
|
|
||||||
|
def test_i_can_present_static_mappings():
    """Static mappings rename/select fields; anything unlisted is dropped."""
    presenter = DefaultDataPresenter("comp_id", "field1 = renamed_1 , field2 ")
    source = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(source)

    expected = Expando({"renamed_1": "value1", "field2": "value2"})  # field3 is removed
    assert result == expected
|
|
||||||
|
|
||||||
|
def test_the_latter_mappings_take_precedence():
    """When the same source field appears twice, the last entry wins."""
    presenter = DefaultDataPresenter("comp_id", "field1 = renamed_1 , field1 ")
    source = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(source)

    # the plain "field1" entry overrides the earlier rename; field2/field3 are dropped
    assert result == Expando({"field1": "value1"})
|
|
||||||
|
|
||||||
|
def test_i_can_present_static_mappings_with_sub_fields():
    """Dotted paths address nested fields; unrenamed ones keep the dotted key."""
    definition = "root.field1 = renamed_1 , root.field2, root.sub_field.field3, root.sub_field.field4=renamed4 "
    presenter = DefaultDataPresenter("comp_id", definition)
    source = Expando({
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    })

    result = presenter.present(source)

    assert isinstance(result, Expando)
    expected = {
        "renamed_1": "value1",
        "root.field2": "value2",
        "root.sub_field.field3": "value3",
        "renamed4": "value4",
    }
    assert result.as_dict() == expected
|
|
||||||
|
|
||||||
|
def test_i_can_present_dynamic_mappings():
    """A bare '*' keeps every top-level field."""
    presenter = DefaultDataPresenter("comp_id", "*")
    source = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(source)

    assert result == Expando({"field1": "value1", "field2": "value2", "field3": "value3"})
|
|
||||||
|
|
||||||
|
def test_i_can_present_dynamic_mappings_for_complex_data():
    """'*' on nested data preserves the whole structure unchanged."""
    presenter = DefaultDataPresenter("comp_id", "*")
    raw = {
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        },
        "field5": "value5",
    }

    result = presenter.present(Expando(raw))

    assert isinstance(result, Expando)
    assert result.as_dict() == raw
|
|
||||||
|
|
||||||
|
def test_i_can_present_dynamic_mappings_with_sub_fields():
    """'obj.*' flattens the sub-object's fields under their dotted paths."""
    presenter = DefaultDataPresenter("comp_id", "root.sub_field.*")
    source = Expando({
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    })

    result = presenter.present(source)

    assert isinstance(result, Expando)
    assert result.as_dict() == {
        "root.sub_field.field3": "value3",
        "root.sub_field.field4": "value4",
    }
|
|
||||||
|
|
||||||
|
def test_i_can_present_dynamic_mappings_with_sub_fields_and_renames():
    """'obj.*=*' flattens the sub-object's fields and strips the prefix."""
    presenter = DefaultDataPresenter("comp_id", "root.sub_field.*=*")
    source = Expando({
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    })

    result = presenter.present(source)

    assert isinstance(result, Expando)
    assert result.as_dict() == {"field3": "value3", "field4": "value4"}
|
|
||||||
|
|
||||||
|
def test_i_can_present_dynamic_mappings_and_rename_them():
    """'*=*' has no visible effect: '*' only expands one level deep."""
    presenter = DefaultDataPresenter("comp_id", "*=*")
    raw = {
        "root1": {"field1": "value1", "field2": "value2"},
        "root2": {"field3": "value3", "field4": "value4"},
    }

    result = presenter.present(Expando(raw))

    assert isinstance(result, Expando)
    assert result.as_dict() == raw
|
|
||||||
|
|
||||||
|
def test_i_can_present_static_and_dynamic_mappings():
    """A rename entry and a sub-object wildcard can be combined."""
    presenter = DefaultDataPresenter("comp_id", "root.field1 = renamed_1, root.sub_field.*")
    source = Expando({
        "root": {
            "field1": "value1",
            "field2": "value2",
            "sub_field": {"field3": "value3", "field4": "value4"},
        }
    })

    result = presenter.present(source)

    assert isinstance(result, Expando)
    assert result.as_dict() == {
        "renamed_1": "value1",
        "root.sub_field.field3": "value3",
        "root.sub_field.field4": "value4",
    }
|
|
||||||
|
|
||||||
|
def test_another_example_of_static_and_dynamic_mappings():
    """A rename after '*' overrides the wildcard entry for that field."""
    presenter = DefaultDataPresenter("comp_id", "* , field1 = renamed_1")
    source = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    result = presenter.present(source)

    # field1 is renamed; the wildcard keeps field2 and field3 as-is
    assert result == Expando({"renamed_1": "value1", "field2": "value2", "field3": "value3"})
|
|
||||||
|
|
||||||
|
def test_i_can_detect_conflict_when_dynamically_renaming_a_field():
    """Two wildcard renames producing the same bare field name must collide."""
    presenter = DefaultDataPresenter("comp_id", "root_1.*=*, root_2.*=*")
    source = Expando({
        "root_1": {"field1": "value1", "field2": "value2"},
        "root_2": {"field1": "value1", "field2": "value2"},
    })

    with pytest.raises(ValueError) as e:
        presenter.present(source)

    assert str(e.value) == "Collision detected for field 'field1'. It is mapped from both 'root_1.*=*' and 'root_2.*=*'."
|
|
||||||
|
|
||||||
|
def test_i_can_detect_declaration_error():
    """An empty entry (two commas in a row) in the definition raises ValueError.

    Previously the captured exception ``e`` was never inspected, so any
    ValueError would have passed; now the message origin is pinned.
    """
    mappings_def = "field1 ,, field2"
    presenter = DefaultDataPresenter("comp_id", mappings_def)
    data = Expando({"field1": "value1", "field2": "value2", "field3": "value3"})

    with pytest.raises(ValueError) as e:
        presenter.present(data)

    # Pin the error origin: validation of the mappings definition itself.
    assert "Invalid mappings definition" in str(e.value)
|
|
||||||
|
def test_i_can_detect_dynamic_error_declaration():
    """Applying a sub-object wildcard to a scalar field raises ValueError.

    Previously the captured exception ``e`` was never inspected, so any
    ValueError would have passed; now the message origin is pinned.
    """
    mappings_def = "root.field1.*"  # field1 is not an object
    presenter = DefaultDataPresenter("comp_id", mappings_def)
    as_dict = {"root": {"field1": "value1",
                        "field2": "value2",
                        "sub_field": {"field3": "value3",
                                      "field4": "value4"
                                      }}}
    data = Expando(as_dict)

    with pytest.raises(ValueError) as e:
        presenter.present(data)

    # Pin the error origin: the wildcard target is not a nested object.
    assert "is not an object" in str(e.value)
@@ -2,6 +2,8 @@ from unittest.mock import MagicMock
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
from core.Expando import Expando
|
||||||
|
from workflow.DefaultDataPresenter import DefaultDataPresenter
|
||||||
from workflow.engine import WorkflowEngine, DataProcessor, DataProducer, DataFilter, DataPresenter
|
from workflow.engine import WorkflowEngine, DataProcessor, DataProducer, DataFilter, DataPresenter
|
||||||
|
|
||||||
|
|
||||||
@@ -11,6 +13,24 @@ def engine():
|
|||||||
return WorkflowEngine()
|
return WorkflowEngine()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def presenter_sample_data():
    """Task-like Expando with nested 'creator' and 'assignee' objects,
    consumed by the DefaultDataPresenter tests."""
    return Expando({
        "id": 123,
        "title": "My Awesome Task",
        "creator": {
            "id": 1,
            "name": "John Doe",
            "email": "john.doe@example.com"
        },
        "assignee": {
            "id": 2,
            "name": "Jane Smith",
            "email": "jane.smith@example.com"
        }
    })
|
|
||||||
|
|
||||||
def test_empty_workflow_initialization(engine):
|
def test_empty_workflow_initialization(engine):
|
||||||
"""Test that a new WorkflowEngine has no processors."""
|
"""Test that a new WorkflowEngine has no processors."""
|
||||||
assert len(engine.processors) == 0
|
assert len(engine.processors) == 0
|
||||||
@@ -124,3 +144,21 @@ def test_branching_workflow(engine):
|
|||||||
|
|
||||||
result = engine.run_to_list()
|
result = engine.run_to_list()
|
||||||
assert result == [1, 10, 2, 20]
|
assert result == [1, 10, 2, 20]
|
||||||
|
|
||||||
|
|
||||||
|
def test_presenter_i_can_use_wildcards(presenter_sample_data):
    """'creator.*' flattens the sub-object under dotted keys; 'id' is kept as-is."""
    presenter = DefaultDataPresenter("component_id", "id, creator.*")

    result = presenter.present(presenter_sample_data).as_dict()

    assert result == {"id": 123,
                      "creator.id": 1,
                      "creator.name": "John Doe",
                      "creator.email": "john.doe@example.com"}
|
|
||||||
|
|
||||||
|
def test_presenter_i_can_rename_wildcard_with_specific_override(presenter_sample_data):
    """A specific rename after 'creator.*=*' overrides the wildcard for that field."""
    presenter = DefaultDataPresenter("component_id", "creator.*=*, creator.name=author_name")

    result = presenter.present(presenter_sample_data).as_dict()

    assert result == {"id": 1, "email": "john.doe@example.com", "author_name": "John Doe"}
|
|
||||||
|
|
||||||
|
def test_presenter_i_can_manage_collisions(presenter_sample_data):
    """Two sub-object wildcard renames producing the same field name raise ValueError."""
    presenter = DefaultDataPresenter("component_id", "creator.*=*, assignee.*=*")

    with pytest.raises(ValueError, match="Collision detected for field"):
        presenter.present(presenter_sample_data).as_dict()
|
|||||||
Reference in New Issue
Block a user