Added syntax colorization
This commit is contained in:
@@ -9,6 +9,7 @@ wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/codemirror.min.cs
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/hint/show-hint.min.js
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/hint/show-hint.min.css
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/display/placeholder.min.js
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/addon/lint/lint.min.css
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/addon/lint/lint.min.js
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/lint/lint.min.css
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/lint/lint.min.js
|
||||
wget https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.65.20/addon/mode/simple.min.js
|
||||
```
|
||||
2
src/myfasthtml/assets/lint.min.js
vendored
2
src/myfasthtml/assets/lint.min.js
vendored
@@ -1 +1 @@
|
||||
!function(t){"object"==typeof exports&&"object"==typeof module?t(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],t):t(CodeMirror)}(function(h){"use strict";var g="CodeMirror-lint-markers",v="CodeMirror-lint-line-";function u(t){t.parentNode&&t.parentNode.removeChild(t)}function C(t,e,n,o){t=t,e=e,n=n,(i=document.createElement("div")).className="CodeMirror-lint-tooltip cm-s-"+t.options.theme,i.appendChild(n.cloneNode(!0)),(t.state.lint.options.selfContain?t.getWrapperElement():document.body).appendChild(i),h.on(document,"mousemove",a),a(e),null!=i.style.opacity&&(i.style.opacity=1);var i,r=i;function a(t){if(!i.parentNode)return h.off(document,"mousemove",a);i.style.top=Math.max(0,t.clientY-i.offsetHeight-5)+"px",i.style.left=t.clientX+5+"px"}function s(){var t;h.off(o,"mouseout",s),r&&((t=r).parentNode&&(null==t.style.opacity&&u(t),t.style.opacity=0,setTimeout(function(){u(t)},600)),r=null)}var l=setInterval(function(){if(r)for(var t=o;;t=t.parentNode){if((t=t&&11==t.nodeType?t.host:t)==document.body)return;if(!t){s();break}}if(!r)return clearInterval(l)},400);h.on(o,"mouseout",s)}function a(l,t,e){for(var n in this.marked=[],(t=t instanceof Function?{getAnnotations:t}:t)&&!0!==t||(t={}),this.options={},this.linterOptions=t.options||{},o)this.options[n]=o[n];for(var n in t)o.hasOwnProperty(n)?null!=t[n]&&(this.options[n]=t[n]):t.options||(this.linterOptions[n]=t[n]);this.timeout=null,this.hasGutter=e,this.onMouseOver=function(t){var e=l,n=t.target||t.srcElement;if(/\bCodeMirror-lint-mark-/.test(n.className)){for(var n=n.getBoundingClientRect(),o=(n.left+n.right)/2,n=(n.top+n.bottom)/2,i=e.findMarksAt(e.coordsChar({left:o,top:n},"client")),r=[],a=0;a<i.length;++a){var s=i[a].__annotation;s&&r.push(s)}r.length&&!function(t,e,n){for(var o=n.target||n.srcElement,i=document.createDocumentFragment(),r=0;r<e.length;r++){var a=e[r];i.appendChild(M(a))}C(t,n,i,o)}(e,r,t)}},this.waitingFor=0}var 
o={highlightLines:!1,tooltips:!0,delay:500,lintOnChange:!0,getAnnotations:null,async:!1,selfContain:null,formatAnnotation:null,onUpdateLinting:null};function y(t){var n,e=t.state.lint;e.hasGutter&&t.clearGutter(g),e.options.highlightLines&&(n=t).eachLine(function(t){var e=t.wrapClass&&/\bCodeMirror-lint-line-\w+\b/.exec(t.wrapClass);e&&n.removeLineClass(t,"wrap",e[0])});for(var o=0;o<e.marked.length;++o)e.marked[o].clear();e.marked.length=0}function M(t){var e=(e=t.severity)||"error",n=document.createElement("div");return n.className="CodeMirror-lint-message CodeMirror-lint-message-"+e,void 0!==t.messageHTML?n.innerHTML=t.messageHTML:n.appendChild(document.createTextNode(t.message)),n}function s(e){var t,n,o,i,r,a,s=e.state.lint;function l(){a=-1,o.off("change",l)}!s||(t=(i=s.options).getAnnotations||e.getHelper(h.Pos(0,0),"lint"))&&(i.async||t.async?(i=t,r=(o=e).state.lint,a=++r.waitingFor,o.on("change",l),i(o.getValue(),function(t,e){o.off("change",l),r.waitingFor==a&&(e&&t instanceof h&&(t=e),o.operation(function(){c(o,t)}))},r.linterOptions,o)):(n=t(e.getValue(),s.linterOptions,e))&&(n.then?n.then(function(t){e.operation(function(){c(e,t)})}):e.operation(function(){c(e,n)})))}function c(t,e){var n=t.state.lint;if(n){for(var o,i,r=n.options,a=(y(t),function(t){for(var e=[],n=0;n<t.length;++n){var o=t[n],i=o.from.line;(e[i]||(e[i]=[])).push(o)}return e}(e)),s=0;s<a.length;++s)if(u=a[s]){for(var l=[],u=u.filter(function(t){return!(-1<l.indexOf(t.message))&&l.push(t.message)}),c=null,f=n.hasGutter&&document.createDocumentFragment(),m=0;m<u.length;++m){var p=u[m],d=p.severity;i=d=d||"error",c="error"==(o=c)?o:i,r.formatAnnotation&&(p=r.formatAnnotation(p)),n.hasGutter&&f.appendChild(M(p)),p.to&&n.marked.push(t.markText(p.from,p.to,{className:"CodeMirror-lint-mark CodeMirror-lint-mark-"+d,__annotation:p}))}n.hasGutter&&t.setGutterMarker(s,g,function(e,n,t,o,i){var r=document.createElement("div"),a=r;return r.className="CodeMirror-lint-marker 
CodeMirror-lint-marker-"+t,o&&((a=r.appendChild(document.createElement("div"))).className="CodeMirror-lint-marker CodeMirror-lint-marker-multiple"),0!=i&&h.on(a,"mouseover",function(t){C(e,t,n,a)}),r}(t,f,c,1<a[s].length,r.tooltips)),r.highlightLines&&t.addLineClass(s,"wrap",v+c)}r.onUpdateLinting&&r.onUpdateLinting(e,a,t)}}function l(t){var e=t.state.lint;e&&(clearTimeout(e.timeout),e.timeout=setTimeout(function(){s(t)},e.options.delay))}h.defineOption("lint",!1,function(t,e,n){if(n&&n!=h.Init&&(y(t),!1!==t.state.lint.options.lintOnChange&&t.off("change",l),h.off(t.getWrapperElement(),"mouseover",t.state.lint.onMouseOver),clearTimeout(t.state.lint.timeout),delete t.state.lint),e){for(var o=t.getOption("gutters"),i=!1,r=0;r<o.length;++r)o[r]==g&&(i=!0);n=t.state.lint=new a(t,e,i);n.options.lintOnChange&&t.on("change",l),0!=n.options.tooltips&&"gutter"!=n.options.tooltips&&h.on(t.getWrapperElement(),"mouseover",n.onMouseOver),s(t)}}),h.defineExtension("performLint",function(){s(this)})});
|
||||
!function(t){"object"==typeof exports&&"object"==typeof module?t(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],t):t(CodeMirror)}(function(p){"use strict";var h="CodeMirror-lint-markers",g="CodeMirror-lint-line-";function u(t){t.parentNode&&t.parentNode.removeChild(t)}function v(t,e,n,o){t=t,e=e,n=n,(i=document.createElement("div")).className="CodeMirror-lint-tooltip cm-s-"+t.options.theme,i.appendChild(n.cloneNode(!0)),(t.state.lint.options.selfContain?t.getWrapperElement():document.body).appendChild(i),p.on(document,"mousemove",a),a(e),null!=i.style.opacity&&(i.style.opacity=1);var i,r=i;function a(t){if(!i.parentNode)return p.off(document,"mousemove",a);var e=Math.max(0,t.clientY-i.offsetHeight-5),t=Math.max(0,Math.min(t.clientX+5,i.ownerDocument.defaultView.innerWidth-i.offsetWidth));i.style.top=e+"px",i.style.left=t+"px"}function l(){var t;p.off(o,"mouseout",l),r&&((t=r).parentNode&&(null==t.style.opacity&&u(t),t.style.opacity=0,setTimeout(function(){u(t)},600)),r=null)}var s=setInterval(function(){if(r)for(var t=o;;t=t.parentNode){if((t=t&&11==t.nodeType?t.host:t)==document.body)return;if(!t){l();break}}if(!r)return clearInterval(s)},400);p.on(o,"mouseout",l)}function a(s,t,e){for(var n in this.marked=[],(t=t instanceof Function?{getAnnotations:t}:t)&&!0!==t||(t={}),this.options={},this.linterOptions=t.options||{},o)this.options[n]=o[n];for(var n in t)o.hasOwnProperty(n)?null!=t[n]&&(this.options[n]=t[n]):t.options||(this.linterOptions[n]=t[n]);this.timeout=null,this.hasGutter=e,this.onMouseOver=function(t){var e=s,n=t.target||t.srcElement;if(/\bCodeMirror-lint-mark-/.test(n.className)){for(var n=n.getBoundingClientRect(),o=(n.left+n.right)/2,n=(n.top+n.bottom)/2,i=e.findMarksAt(e.coordsChar({left:o,top:n},"client")),r=[],a=0;a<i.length;++a){var l=i[a].__annotation;l&&r.push(l)}r.length&&!function(t,e,n){for(var o=n.target||n.srcElement,i=document.createDocumentFragment(),r=0;r<e.length;r++){var 
a=e[r];i.appendChild(M(a))}v(t,n,i,o)}(e,r,t)}},this.waitingFor=0}var o={highlightLines:!1,tooltips:!0,delay:500,lintOnChange:!0,getAnnotations:null,async:!1,selfContain:null,formatAnnotation:null,onUpdateLinting:null};function C(t){var n,e=t.state.lint;e.hasGutter&&t.clearGutter(h),e.options.highlightLines&&(n=t).eachLine(function(t){var e=t.wrapClass&&/\bCodeMirror-lint-line-\w+\b/.exec(t.wrapClass);e&&n.removeLineClass(t,"wrap",e[0])});for(var o=0;o<e.marked.length;++o)e.marked[o].clear();e.marked.length=0}function M(t){var e=(e=t.severity)||"error",n=document.createElement("div");return n.className="CodeMirror-lint-message CodeMirror-lint-message-"+e,void 0!==t.messageHTML?n.innerHTML=t.messageHTML:n.appendChild(document.createTextNode(t.message)),n}function l(e){var t,n,o,i,r,a,l=e.state.lint;function s(){a=-1,o.off("change",s)}!l||(t=(i=l.options).getAnnotations||e.getHelper(p.Pos(0,0),"lint"))&&(i.async||t.async?(i=t,r=(o=e).state.lint,a=++r.waitingFor,o.on("change",s),i(o.getValue(),function(t,e){o.off("change",s),r.waitingFor==a&&(e&&t instanceof p&&(t=e),o.operation(function(){c(o,t)}))},r.linterOptions,o)):(n=t(e.getValue(),l.linterOptions,e))&&(n.then?n.then(function(t){e.operation(function(){c(e,t)})}):e.operation(function(){c(e,n)})))}function c(t,e){var n=t.state.lint;if(n){for(var o,i,r=n.options,a=(C(t),function(t){for(var e=[],n=0;n<t.length;++n){var o=t[n],i=o.from.line;(e[i]||(e[i]=[])).push(o)}return e}(e)),l=0;l<a.length;++l){var s=a[l];if(s){for(var u=null,c=n.hasGutter&&document.createDocumentFragment(),f=0;f<s.length;++f){var m=s[f],d=m.severity;i=d=d||"error",u="error"==(o=u)?o:i,r.formatAnnotation&&(m=r.formatAnnotation(m)),n.hasGutter&&c.appendChild(M(m)),m.to&&n.marked.push(t.markText(m.from,m.to,{className:"CodeMirror-lint-mark CodeMirror-lint-mark-"+d,__annotation:m}))}n.hasGutter&&t.setGutterMarker(l,h,function(e,n,t,o,i){var r=document.createElement("div"),a=r;return r.className="CodeMirror-lint-marker 
CodeMirror-lint-marker-"+t,o&&((a=r.appendChild(document.createElement("div"))).className="CodeMirror-lint-marker CodeMirror-lint-marker-multiple"),0!=i&&p.on(a,"mouseover",function(t){v(e,t,n,a)}),r}(t,c,u,1<s.length,r.tooltips)),r.highlightLines&&t.addLineClass(l,"wrap",g+u)}}r.onUpdateLinting&&r.onUpdateLinting(e,a,t)}}function s(t){var e=t.state.lint;e&&(clearTimeout(e.timeout),e.timeout=setTimeout(function(){l(t)},e.options.delay))}p.defineOption("lint",!1,function(t,e,n){if(n&&n!=p.Init&&(C(t),!1!==t.state.lint.options.lintOnChange&&t.off("change",s),p.off(t.getWrapperElement(),"mouseover",t.state.lint.onMouseOver),clearTimeout(t.state.lint.timeout),delete t.state.lint),e){for(var o=t.getOption("gutters"),i=!1,r=0;r<o.length;++r)o[r]==h&&(i=!0);n=t.state.lint=new a(t,e,i);n.options.lintOnChange&&t.on("change",s),0!=n.options.tooltips&&"gutter"!=n.options.tooltips&&p.on(t.getWrapperElement(),"mouseover",n.onMouseOver),l(t)}}),p.defineExtension("performLint",function(){l(this)})});
|
||||
@@ -1187,3 +1187,213 @@
|
||||
.dt2-column-manager-label:hover {
|
||||
background-color: var(--color-base-300);
|
||||
}
|
||||
|
||||
/* *********************************************** */
|
||||
/* ********** CodeMirror DaisyUI Theme *********** */
|
||||
/* *********************************************** */
|
||||
|
||||
/* Theme selector - uses DaisyUI variables for automatic theme switching */
|
||||
.cm-s-daisy.CodeMirror {
|
||||
background-color: var(--color-base-100);
|
||||
color: var(--color-base-content);
|
||||
font-family: var(--font-mono, ui-monospace, 'Cascadia Code', 'Source Code Pro', Menlo, Monaco, 'Courier New', monospace);
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
height: auto;
|
||||
border-radius: 0.5rem;
|
||||
border: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
/* Cursor */
|
||||
.cm-s-daisy .CodeMirror-cursor {
|
||||
border-left-color: var(--color-primary);
|
||||
border-left-width: 2px;
|
||||
}
|
||||
|
||||
/* Selection */
|
||||
.cm-s-daisy .CodeMirror-selected {
|
||||
background-color: var(--color-selection) !important;
|
||||
}
|
||||
|
||||
.cm-s-daisy.CodeMirror-focused .CodeMirror-selected {
|
||||
background-color: color-mix(in oklab, var(--color-primary) 30%, transparent) !important;
|
||||
}
|
||||
|
||||
/* Line numbers and gutters */
|
||||
.cm-s-daisy .CodeMirror-gutters {
|
||||
background-color: var(--color-base-200);
|
||||
border-right: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.cm-s-daisy .CodeMirror-linenumber {
|
||||
color: color-mix(in oklab, var(--color-base-content) 50%, transparent);
|
||||
padding: 0 8px;
|
||||
}
|
||||
|
||||
/* Active line */
|
||||
.cm-s-daisy .CodeMirror-activeline-background {
|
||||
background-color: color-mix(in oklab, var(--color-base-content) 5%, transparent);
|
||||
}
|
||||
|
||||
.cm-s-daisy .CodeMirror-activeline-gutter {
|
||||
background-color: var(--color-base-300);
|
||||
}
|
||||
|
||||
/* Matching brackets */
|
||||
.cm-s-daisy .CodeMirror-matchingbracket {
|
||||
color: var(--color-success) !important;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.cm-s-daisy .CodeMirror-nonmatchingbracket {
|
||||
color: var(--color-error) !important;
|
||||
}
|
||||
|
||||
/* *********************************************** */
|
||||
/* ******** CodeMirror Syntax Highlighting ******* */
|
||||
/* *********************************************** */
|
||||
|
||||
/* Keywords (column, row, cell, if, not, and, or, in, between, case) */
|
||||
.cm-s-daisy .cm-keyword {
|
||||
color: var(--color-primary);
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
/* Built-in functions (style, format) */
|
||||
.cm-s-daisy .cm-builtin {
|
||||
color: var(--color-secondary);
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
/* Operators (==, <, >, contains, startswith, etc.) */
|
||||
.cm-s-daisy .cm-operator {
|
||||
color: var(--color-warning);
|
||||
}
|
||||
|
||||
/* Strings ("error", "EUR", etc.) */
|
||||
.cm-s-daisy .cm-string {
|
||||
color: var(--color-success);
|
||||
}
|
||||
|
||||
/* Numbers (0, 100, 3.14) */
|
||||
.cm-s-daisy .cm-number {
|
||||
color: var(--color-accent);
|
||||
}
|
||||
|
||||
/* Booleans (True, False, true, false) */
|
||||
.cm-s-daisy .cm-atom {
|
||||
color: var(--color-info);
|
||||
}
|
||||
|
||||
/* Special variables (value, col, row, cell) */
|
||||
.cm-s-daisy .cm-variable-2 {
|
||||
color: var(--color-accent);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* Cell IDs (tcell_*) */
|
||||
.cm-s-daisy .cm-variable-3 {
|
||||
color: color-mix(in oklab, var(--color-base-content) 70%, transparent);
|
||||
}
|
||||
|
||||
/* Comments (#...) */
|
||||
.cm-s-daisy .cm-comment {
|
||||
color: color-mix(in oklab, var(--color-base-content) 50%, transparent);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* Property names (bold=, color=, etc.) */
|
||||
.cm-s-daisy .cm-property {
|
||||
color: var(--color-base-content);
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
/* Errors/invalid syntax */
|
||||
.cm-s-daisy .cm-error {
|
||||
color: var(--color-error);
|
||||
text-decoration: underline wavy;
|
||||
}
|
||||
|
||||
/* *********************************************** */
|
||||
/* ********** CodeMirror Autocomplete ************ */
|
||||
/* *********************************************** */
|
||||
|
||||
/* Autocomplete dropdown container */
|
||||
.CodeMirror-hints {
|
||||
background-color: var(--color-base-100);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: 0.5rem;
|
||||
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06);
|
||||
font-family: var(--font-mono, ui-monospace, monospace);
|
||||
font-size: 13px;
|
||||
max-height: 20em;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
/* Individual hint items */
|
||||
.CodeMirror-hint {
|
||||
color: var(--color-base-content);
|
||||
padding: 4px 8px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
/* Hovered/selected hint */
|
||||
.CodeMirror-hint-active {
|
||||
background-color: var(--color-primary);
|
||||
color: var(--color-primary-content);
|
||||
}
|
||||
|
||||
/* *********************************************** */
|
||||
/* ********** CodeMirror Lint Markers ************ */
|
||||
/* *********************************************** */
|
||||
|
||||
/* Lint gutter marker */
|
||||
.CodeMirror-lint-marker {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.CodeMirror-lint-marker-error {
|
||||
color: var(--color-error);
|
||||
}
|
||||
|
||||
.CodeMirror-lint-marker-warning {
|
||||
color: var(--color-warning);
|
||||
}
|
||||
|
||||
/* Lint tooltip */
|
||||
.CodeMirror-lint-tooltip {
|
||||
background-color: var(--color-base-100);
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: 0.375rem;
|
||||
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
|
||||
color: var(--color-base-content);
|
||||
font-family: var(--font-sans, ui-sans-serif, system-ui);
|
||||
font-size: 13px;
|
||||
padding: 8px 12px;
|
||||
max-width: 400px;
|
||||
}
|
||||
|
||||
.CodeMirror-lint-message-error {
|
||||
color: var(--color-error);
|
||||
}
|
||||
|
||||
.CodeMirror-lint-message-warning {
|
||||
color: var(--color-warning);
|
||||
}
|
||||
|
||||
/* *********************************************** */
|
||||
/* ********** DslEditor Wrapper Styles *********** */
|
||||
/* *********************************************** */
|
||||
|
||||
/* Wrapper container for DslEditor */
|
||||
.mf-dsl-editor-wrapper {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
/* Editor container */
|
||||
.mf-dsl-editor {
|
||||
border-radius: 0.5rem;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
@@ -2250,6 +2250,27 @@ function initDslEditor(config) {
|
||||
// Mark lint function as async for CodeMirror
|
||||
dslLint.async = true;
|
||||
|
||||
/* --------------------------------------------------
|
||||
* Register Simple Mode if available and config provided
|
||||
* -------------------------------------------------- */
|
||||
|
||||
let modeName = null;
|
||||
|
||||
if (typeof CodeMirror.defineSimpleMode !== "undefined" && dsl && dsl.simpleModeConfig) {
|
||||
// Generate unique mode name from DSL name
|
||||
modeName = `dsl-${dsl.name.toLowerCase().replace(/\s+/g, '-')}`;
|
||||
|
||||
// Register the mode if not already registered
|
||||
if (!CodeMirror.modes[modeName]) {
|
||||
try {
|
||||
CodeMirror.defineSimpleMode(modeName, dsl.simpleModeConfig);
|
||||
} catch (err) {
|
||||
console.error(`Failed to register Simple Mode for ${dsl.name}:`, err);
|
||||
modeName = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* --------------------------------------------------
|
||||
* Create CodeMirror editor
|
||||
* -------------------------------------------------- */
|
||||
@@ -2262,6 +2283,8 @@ function initDslEditor(config) {
|
||||
|
||||
const editorOptions = {
|
||||
value: textarea.value || "",
|
||||
mode: modeName || undefined, // Use Simple Mode if available
|
||||
theme: "daisy", // Use DaisyUI theme for automatic theme switching
|
||||
lineNumbers: !!lineNumbers,
|
||||
readOnly: !!readonly,
|
||||
placeholder: placeholder || "",
|
||||
|
||||
1
src/myfasthtml/assets/simple.min.js
vendored
Normal file
1
src/myfasthtml/assets/simple.min.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!function(e){"object"==typeof exports&&"object"==typeof module?e(require("../../lib/codemirror")):"function"==typeof define&&define.amd?define(["../../lib/codemirror"],e):e(CodeMirror)}(function(v){"use strict";function h(e,t){if(!e.hasOwnProperty(t))throw new Error("Undefined state "+t+" in simple mode")}function k(e,t){if(!e)return/(?:)/;var n="";return e=e instanceof RegExp?(e.ignoreCase&&(n="i"),e.unicode&&(n+="u"),e.source):String(e),new RegExp((!1===t?"":"^")+"(?:"+e+")",n)}function g(e,t){(e.next||e.push)&&h(t,e.next||e.push),this.regex=k(e.regex),this.token=function(e){if(!e)return null;if(e.apply)return e;if("string"==typeof e)return e.replace(/\./g," ");for(var t=[],n=0;n<e.length;n++)t.push(e[n]&&e[n].replace(/\./g," "));return t}(e.token),this.data=e}v.defineSimpleMode=function(e,t){v.defineMode(e,function(e){return v.simpleMode(e,t)})},v.simpleMode=function(e,t){h(t,"start");var n,a={},o=t.meta||{},r=!1;for(n in t)if(n!=o&&t.hasOwnProperty(n))for(var i=a[n]=[],l=t[n],s=0;s<l.length;s++){var d=l[s];i.push(new g(d,t)),(d.indent||d.dedent)&&(r=!0)}var c,p,S,m,u={startState:function(){return{state:"start",pending:null,local:null,localState:null,indent:r?[]:null}},copyState:function(e){var t={state:e.state,pending:e.pending,local:e.local,localState:null,indent:e.indent&&e.indent.slice(0)};e.localState&&(t.localState=v.copyState(e.local.mode,e.localState)),e.stack&&(t.stack=e.stack.slice(0));for(var n=e.persistentStates;n;n=n.next)t.persistentStates={mode:n.mode,spec:n.spec,state:n.state==e.localState?t.localState:v.copyState(n.mode,n.state),next:t.persistentStates};return t},token:(m=e,function(e,t){var n,a;if(t.pending)return a=t.pending.shift(),0==t.pending.length&&(t.pending=null),e.pos+=a.text.length,a.token;if(t.local)return t.local.end&&e.match(t.local.end)?(n=t.local.endToken||null,t.local=t.localState=null):(n=t.local.mode.token(e,t.localState),t.local.endScan&&(a=t.local.endScan.exec(e.current()))&&(e.pos=e.start+a.index)),n;for(var 
o=S[t.state],r=0;r<o.length;r++){var i=o[r],l=(!i.data.sol||e.sol())&&e.match(i.regex);if(l){if(i.data.next?t.state=i.data.next:i.data.push?((t.stack||(t.stack=[])).push(t.state),t.state=i.data.push):i.data.pop&&t.stack&&t.stack.length&&(t.state=t.stack.pop()),i.data.mode){h=d=f=s=u=p=c=d=void 0;var s,d=m,c=t,p=i.data.mode,u=i.token;if(p.persistent)for(var f=c.persistentStates;f&&!s;f=f.next)(p.spec?function e(t,n){if(t===n)return!0;if(!t||"object"!=typeof t||!n||"object"!=typeof n)return!1;var a=0;for(var o in t)if(t.hasOwnProperty(o)){if(!n.hasOwnProperty(o)||!e(t[o],n[o]))return!1;a++}for(var o in n)n.hasOwnProperty(o)&&a--;return 0==a}(p.spec,f.spec):p.mode==f.mode)&&(s=f);var d=s?s.mode:p.mode||v.getMode(d,p.spec),h=s?s.state:v.startState(d);p.persistent&&!s&&(c.persistentStates={mode:d,spec:p.spec,state:h,next:c.persistentStates}),c.localState=h,c.local={mode:d,end:p.end&&k(p.end),endScan:p.end&&!1!==p.forceEnd&&k(p.end,!1),endToken:u&&u.join?u[u.length-1]:u}}i.data.indent&&t.indent.push(e.indentation()+m.indentUnit),i.data.dedent&&t.indent.pop();h=i.token;if(h&&h.apply&&(h=h(l)),2<l.length&&i.token&&"string"!=typeof i.token){for(var g=2;g<l.length;g++)l[g]&&(t.pending||(t.pending=[])).push({text:l[g],token:i.token[g-1]});return e.backUp(l[0].length-(l[1]?l[1].length:0)),h[0]}return h&&h.join?h[0]:h}}return e.next(),null}),innerMode:function(e){return e.local&&{mode:e.local.mode,state:e.localState}},indent:(c=S=a,function(e,t,n){if(e.local&&e.local.mode.indent)return e.local.mode.indent(e.localState,t,n);if(null==e.indent||e.local||p.dontIndentStates&&-1<function(e,t){for(var n=0;n<t.length;n++)if(t[n]===e)return!0}(e.state,p.dontIndentStates))return v.Pass;var a=e.indent.length-1,o=c[e.state];e:for(;;){for(var r=0;r<o.length;r++){var i=o[r];if(i.data.dedent&&!1!==i.data.dedentIfLineStart){var l=i.regex.exec(t);if(l&&l[0]){a--,(i.next||i.push)&&(o=c[i.next||i.push]),t=t.slice(l[0].length);continue e}}}break}return a<0?0:e.indent[a]})};if(p=o)for(var f in 
o)o.hasOwnProperty(f)&&(u[f]=o[f]);return u}});
|
||||
@@ -137,6 +137,11 @@ class DslEditor(MultipleInstance):
|
||||
|
||||
def _get_editor_config(self) -> dict:
|
||||
"""Build the JavaScript configuration object."""
|
||||
# Get Simple Mode config if available
|
||||
simple_mode_config = None
|
||||
if hasattr(self._dsl, 'simple_mode_config'):
|
||||
simple_mode_config = self._dsl.simple_mode_config
|
||||
|
||||
config = {
|
||||
"elementId": str(self._id),
|
||||
"textareaId": f"ta_{self._id}",
|
||||
@@ -150,6 +155,7 @@ class DslEditor(MultipleInstance):
|
||||
"dsl": {
|
||||
"name": self._dsl.name,
|
||||
"completions": self._dsl.completions,
|
||||
"simpleModeConfig": simple_mode_config,
|
||||
},
|
||||
}
|
||||
return config
|
||||
|
||||
@@ -9,9 +9,9 @@ from abc import ABC, abstractmethod
|
||||
from functools import cached_property
|
||||
from typing import List, Dict, Any
|
||||
|
||||
# TODO: Replace with lark_to_simple_mode when implemented
|
||||
from myfasthtml.core.dsl.lark_to_lezer import (
|
||||
lark_to_lezer_grammar,
|
||||
extract_completions_from_grammar,
|
||||
extract_completions_from_grammar, # Will be moved to utils.py
|
||||
)
|
||||
from myfasthtml.core.utils import make_safe_id
|
||||
|
||||
@@ -39,18 +39,6 @@ class DSLDefinition(ABC):
|
||||
"""
|
||||
pass
|
||||
|
||||
@cached_property
|
||||
def lezer_grammar(self) -> str:
|
||||
"""
|
||||
Return the Lezer grammar derived from the Lark grammar.
|
||||
|
||||
This is cached after first computation.
|
||||
|
||||
Returns:
|
||||
The Lezer grammar as a string.
|
||||
"""
|
||||
return lark_to_lezer_grammar(self.get_grammar())
|
||||
|
||||
@cached_property
|
||||
def completions(self) -> Dict[str, List[str]]:
|
||||
"""
|
||||
@@ -68,6 +56,26 @@ class DSLDefinition(ABC):
|
||||
"""
|
||||
return extract_completions_from_grammar(self.get_grammar())
|
||||
|
||||
@cached_property
|
||||
def simple_mode_config(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Return the CodeMirror 5 Simple Mode configuration for syntax highlighting.
|
||||
|
||||
This is cached after first computation.
|
||||
|
||||
Returns:
|
||||
Dictionary with Simple Mode rules:
|
||||
{
|
||||
"start": [
|
||||
{"regex": "...", "token": "keyword"},
|
||||
{"regex": "...", "token": "string"},
|
||||
...
|
||||
]
|
||||
}
|
||||
"""
|
||||
from myfasthtml.core.dsl.lark_to_simple_mode import lark_to_simple_mode
|
||||
return lark_to_simple_mode(self.get_grammar())
|
||||
|
||||
def get_editor_config(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Return the configuration for the DslEditor JavaScript initialization.
|
||||
|
||||
@@ -1,256 +1,267 @@
|
||||
"""
|
||||
Utilities for converting Lark grammars to Lezer format and extracting completions.
|
||||
|
||||
This module provides functions to:
|
||||
1. Transform a Lark grammar to a Lezer grammar for CodeMirror
|
||||
2. Extract completion items (keywords, operators, etc.) from a Lark grammar
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Dict, List, Set
|
||||
|
||||
|
||||
def lark_to_lezer_grammar(lark_grammar: str) -> str:
|
||||
"""
|
||||
Convert a Lark grammar to a Lezer grammar.
|
||||
|
||||
This is a simplified converter that handles common Lark patterns.
|
||||
Complex grammars may require manual adjustment.
|
||||
|
||||
Args:
|
||||
lark_grammar: The Lark grammar string.
|
||||
|
||||
Returns:
|
||||
The Lezer grammar string.
|
||||
"""
|
||||
lines = lark_grammar.strip().split("\n")
|
||||
lezer_rules = []
|
||||
tokens = []
|
||||
|
||||
for line in lines:
|
||||
line = line.strip()
|
||||
|
||||
# Skip empty lines and comments
|
||||
if not line or line.startswith("//") or line.startswith("#"):
|
||||
continue
|
||||
|
||||
# Skip Lark-specific directives
|
||||
if line.startswith("%"):
|
||||
continue
|
||||
|
||||
# Parse rule definitions (lowercase names only)
|
||||
rule_match = re.match(r"^([a-z_][a-z0-9_]*)\s*:\s*(.+)$", line)
|
||||
if rule_match:
|
||||
name, body = rule_match.groups()
|
||||
lezer_rule = _convert_rule(name, body)
|
||||
if lezer_rule:
|
||||
lezer_rules.append(lezer_rule)
|
||||
continue
|
||||
|
||||
# Parse terminal definitions (uppercase names)
|
||||
terminal_match = re.match(r"^([A-Z_][A-Z0-9_]*)\s*:\s*(.+)$", line)
|
||||
if terminal_match:
|
||||
name, pattern = terminal_match.groups()
|
||||
token = _convert_terminal(name, pattern)
|
||||
if token:
|
||||
tokens.append(token)
|
||||
|
||||
# Build Lezer grammar
|
||||
lezer_output = ["@top Start { scope+ }", ""]
|
||||
|
||||
# Add rules
|
||||
for rule in lezer_rules:
|
||||
lezer_output.append(rule)
|
||||
|
||||
lezer_output.append("")
|
||||
lezer_output.append("@tokens {")
|
||||
|
||||
# Add tokens
|
||||
for token in tokens:
|
||||
lezer_output.append(f" {token}")
|
||||
|
||||
# Add common tokens
|
||||
lezer_output.extend([
|
||||
' whitespace { $[ \\t]+ }',
|
||||
' newline { $[\\n\\r] }',
|
||||
' Comment { "#" ![$\\n]* }',
|
||||
])
|
||||
|
||||
lezer_output.append("}")
|
||||
lezer_output.append("")
|
||||
lezer_output.append("@skip { whitespace | Comment }")
|
||||
|
||||
return "\n".join(lezer_output)
|
||||
|
||||
|
||||
def _convert_rule(name: str, body: str) -> str:
|
||||
"""Convert a single Lark rule to Lezer format."""
|
||||
# Skip internal rules (starting with _)
|
||||
if name.startswith("_"):
|
||||
return ""
|
||||
|
||||
# Convert rule name to PascalCase for Lezer
|
||||
lezer_name = _to_pascal_case(name)
|
||||
|
||||
# Convert body
|
||||
lezer_body = _convert_body(body)
|
||||
|
||||
if lezer_body:
|
||||
return f"{lezer_name} {{ {lezer_body} }}"
|
||||
return ""
|
||||
|
||||
|
||||
def _convert_terminal(name: str, pattern: str) -> str:
|
||||
"""Convert a Lark terminal to Lezer token format."""
|
||||
pattern = pattern.strip()
|
||||
|
||||
# Handle regex patterns
|
||||
if pattern.startswith("/") and pattern.endswith("/"):
|
||||
regex = pattern[1:-1]
|
||||
# Convert to Lezer regex format
|
||||
return f'{name} {{ ${regex}$ }}'
|
||||
|
||||
# Handle string literals
|
||||
if pattern.startswith('"') or pattern.startswith("'"):
|
||||
return f'{name} {{ {pattern} }}'
|
||||
|
||||
# Handle alternatives (literal strings separated by |)
|
||||
if "|" in pattern:
|
||||
alternatives = [alt.strip() for alt in pattern.split("|")]
|
||||
if all(alt.startswith('"') or alt.startswith("'") for alt in alternatives):
|
||||
return f'{name} {{ {" | ".join(alternatives)} }}'
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def _convert_body(body: str) -> str:
|
||||
"""Convert the body of a Lark rule to Lezer format."""
|
||||
# Remove inline transformations (-> name)
|
||||
body = re.sub(r"\s*->\s*\w+", "", body)
|
||||
|
||||
# Convert alternatives
|
||||
parts = []
|
||||
for alt in body.split("|"):
|
||||
alt = alt.strip()
|
||||
if alt:
|
||||
converted = _convert_sequence(alt)
|
||||
if converted:
|
||||
parts.append(converted)
|
||||
|
||||
return " | ".join(parts)
|
||||
|
||||
|
||||
def _convert_sequence(seq: str) -> str:
|
||||
"""Convert a sequence of items in a rule."""
|
||||
items = []
|
||||
|
||||
# Tokenize the sequence
|
||||
tokens = re.findall(
|
||||
r'"[^"]*"|\'[^\']*\'|/[^/]+/|\([^)]+\)|\[[^\]]+\]|[a-zA-Z_][a-zA-Z0-9_]*|\?|\*|\+',
|
||||
seq
|
||||
)
|
||||
|
||||
for token in tokens:
|
||||
if token.startswith('"') or token.startswith("'"):
|
||||
# String literal
|
||||
items.append(token)
|
||||
elif token.startswith("("):
|
||||
# Group
|
||||
inner = token[1:-1]
|
||||
items.append(f"({_convert_body(inner)})")
|
||||
elif token.startswith("["):
|
||||
# Optional group in Lark
|
||||
inner = token[1:-1]
|
||||
items.append(f"({_convert_body(inner)})?")
|
||||
elif token in ("?", "*", "+"):
|
||||
# Quantifiers - attach to previous item
|
||||
if items:
|
||||
items[-1] = items[-1] + token
|
||||
elif token.isupper() or token.startswith("_"):
|
||||
# Terminal reference
|
||||
items.append(token)
|
||||
elif token.islower() or "_" in token:
|
||||
# Rule reference - convert to PascalCase
|
||||
items.append(_to_pascal_case(token))
|
||||
|
||||
return " ".join(items)
|
||||
|
||||
|
||||
def _to_pascal_case(name: str) -> str:
|
||||
"""Convert snake_case to PascalCase."""
|
||||
return "".join(word.capitalize() for word in name.split("_"))
|
||||
|
||||
|
||||
def extract_completions_from_grammar(lark_grammar: str) -> Dict[str, List[str]]:
    """
    Extract completion items from a Lark grammar.

    Parses the grammar to find:
    - Keywords (reserved words like if, not, and)
    - Operators (==, !=, contains, etc.)
    - Functions (style, format, etc.)
    - Types (number, date, boolean, etc.)
    - Literals (True, False, etc.)

    Args:
        lark_grammar: The Lark grammar string.

    Returns:
        Dictionary with completion categories ("keywords", "operators",
        "functions", "types", "literals"), each a sorted list of strings.
    """
    keywords: Set[str] = set()
    operators: Set[str] = set()
    functions: Set[str] = set()
    types: Set[str] = set()
    literals: Set[str] = set()

    # Find all quoted strings (potential keywords/operators).
    quoted_strings = re.findall(r'"([^"]+)"', lark_grammar)

    # Also scan terminal definitions with string alternatives
    # (e.g. BOOLEAN: "True" | "False" | "Maybe").
    # BUG FIX: the previous implementation used a regex with a *repeated*
    # capture group, which only retains the first and LAST alternative —
    # middle alternatives were silently dropped. Scanning each definition
    # body with findall captures every alternative.
    for definition in re.findall(r'^\s*[A-Z_]+\s*:\s*(.+)$', lark_grammar, re.MULTILINE):
        quoted_strings.extend(re.findall(r'"([^"]+)"', definition))

    for s in quoted_strings:
        s_lower = s.lower()

        # Classify based on pattern (first match wins).
        if s in ("==", "!=", "<=", "<", ">=", ">", "+", "-", "*", "/"):
            operators.add(s)
        elif s_lower in ("contains", "startswith", "endswith", "in", "between", "isempty", "isnotempty"):
            operators.add(s_lower)
        elif s_lower in ("if", "not", "and", "or"):
            keywords.add(s_lower)
        elif s_lower in ("true", "false"):
            # Literals keep their original casing (e.g. "True"/"False").
            literals.add(s)
        elif s_lower in ("style", "format"):
            functions.add(s_lower)
        elif s_lower in ("column", "row", "cell", "value", "col"):
            keywords.add(s_lower)
        elif s_lower in ("number", "date", "boolean", "text", "enum"):
            types.add(s_lower)
        elif s_lower == "case":
            keywords.add(s_lower)

    # Find function-like patterns: a quoted word followed by a quoted "(".
    function_patterns = re.findall(r'"(\w+)"\s*"?\("', lark_grammar)
    for func in function_patterns:
        if func.lower() not in ("true", "false"):
            functions.add(func.lower())

    # Collect types from the format_type rule (everything up to a blank
    # line or the end of the grammar).
    type_match = re.search(r'format_type\s*:\s*(.+?)(?:\n\n|\Z)', lark_grammar, re.DOTALL)
    if type_match:
        type_strings = re.findall(r'"(\w+)"', type_match.group(1))
        types.update(t.lower() for t in type_strings)

    return {
        "keywords": sorted(keywords),
        "operators": sorted(operators),
        "functions": sorted(functions),
        "types": sorted(types),
        "literals": sorted(literals),
    }
|
||||
# """
|
||||
# DEPRECATED: Utilities for converting Lark grammars to Lezer format.
|
||||
#
|
||||
# ⚠️ WARNING: This module is deprecated and will be removed in a future version.
|
||||
#
|
||||
# Original purpose:
|
||||
# - Transform a Lark grammar to a Lezer grammar for CodeMirror 6
|
||||
# - Extract completion items (keywords, operators, etc.) from a Lark grammar
|
||||
#
|
||||
# Deprecation reason:
|
||||
# - CodeMirror 6 requires a bundler (Webpack, Rollup, etc.)
|
||||
# - Incompatible with FastHTML's direct script inclusion approach
|
||||
# - Replaced by CodeMirror 5 Simple Mode (see lark_to_simple_mode.py)
|
||||
#
|
||||
# Migration path:
|
||||
# - Use lark_to_simple_mode.py for CodeMirror 5 syntax highlighting
|
||||
# - extract_completions_from_grammar() is still used and will be moved to utils.py
|
||||
# """
|
||||
#
|
||||
# import re
|
||||
# from typing import Dict, List, Set
|
||||
#
|
||||
#
|
||||
# def lark_to_lezer_grammar(lark_grammar: str) -> str:
|
||||
# """
|
||||
# Convert a Lark grammar to a Lezer grammar.
|
||||
#
|
||||
# This is a simplified converter that handles common Lark patterns.
|
||||
# Complex grammars may require manual adjustment.
|
||||
#
|
||||
# Args:
|
||||
# lark_grammar: The Lark grammar string.
|
||||
#
|
||||
# Returns:
|
||||
# The Lezer grammar string.
|
||||
# """
|
||||
# lines = lark_grammar.strip().split("\n")
|
||||
# lezer_rules = []
|
||||
# tokens = []
|
||||
#
|
||||
# for line in lines:
|
||||
# line = line.strip()
|
||||
#
|
||||
# # Skip empty lines and comments
|
||||
# if not line or line.startswith("//") or line.startswith("#"):
|
||||
# continue
|
||||
#
|
||||
# # Skip Lark-specific directives
|
||||
# if line.startswith("%"):
|
||||
# continue
|
||||
#
|
||||
# # Parse rule definitions (lowercase names only)
|
||||
# rule_match = re.match(r"^([a-z_][a-z0-9_]*)\s*:\s*(.+)$", line)
|
||||
# if rule_match:
|
||||
# name, body = rule_match.groups()
|
||||
# lezer_rule = _convert_rule(name, body)
|
||||
# if lezer_rule:
|
||||
# lezer_rules.append(lezer_rule)
|
||||
# continue
|
||||
#
|
||||
# # Parse terminal definitions (uppercase names)
|
||||
# terminal_match = re.match(r"^([A-Z_][A-Z0-9_]*)\s*:\s*(.+)$", line)
|
||||
# if terminal_match:
|
||||
# name, pattern = terminal_match.groups()
|
||||
# token = _convert_terminal(name, pattern)
|
||||
# if token:
|
||||
# tokens.append(token)
|
||||
#
|
||||
# # Build Lezer grammar
|
||||
# lezer_output = ["@top Start { scope+ }", ""]
|
||||
#
|
||||
# # Add rules
|
||||
# for rule in lezer_rules:
|
||||
# lezer_output.append(rule)
|
||||
#
|
||||
# lezer_output.append("")
|
||||
# lezer_output.append("@tokens {")
|
||||
#
|
||||
# # Add tokens
|
||||
# for token in tokens:
|
||||
# lezer_output.append(f" {token}")
|
||||
#
|
||||
# # Add common tokens
|
||||
# lezer_output.extend([
|
||||
# ' whitespace { $[ \\t]+ }',
|
||||
# ' newline { $[\\n\\r] }',
|
||||
# ' Comment { "#" ![$\\n]* }',
|
||||
# ])
|
||||
#
|
||||
# lezer_output.append("}")
|
||||
# lezer_output.append("")
|
||||
# lezer_output.append("@skip { whitespace | Comment }")
|
||||
#
|
||||
# return "\n".join(lezer_output)
|
||||
#
|
||||
#
|
||||
# def _convert_rule(name: str, body: str) -> str:
|
||||
# """Convert a single Lark rule to Lezer format."""
|
||||
# # Skip internal rules (starting with _)
|
||||
# if name.startswith("_"):
|
||||
# return ""
|
||||
#
|
||||
# # Convert rule name to PascalCase for Lezer
|
||||
# lezer_name = _to_pascal_case(name)
|
||||
#
|
||||
# # Convert body
|
||||
# lezer_body = _convert_body(body)
|
||||
#
|
||||
# if lezer_body:
|
||||
# return f"{lezer_name} {{ {lezer_body} }}"
|
||||
# return ""
|
||||
#
|
||||
#
|
||||
# def _convert_terminal(name: str, pattern: str) -> str:
|
||||
# """Convert a Lark terminal to Lezer token format."""
|
||||
# pattern = pattern.strip()
|
||||
#
|
||||
# # Handle regex patterns
|
||||
# if pattern.startswith("/") and pattern.endswith("/"):
|
||||
# regex = pattern[1:-1]
|
||||
# # Convert to Lezer regex format
|
||||
# return f'{name} {{ ${regex}$ }}'
|
||||
#
|
||||
# # Handle string literals
|
||||
# if pattern.startswith('"') or pattern.startswith("'"):
|
||||
# return f'{name} {{ {pattern} }}'
|
||||
#
|
||||
# # Handle alternatives (literal strings separated by |)
|
||||
# if "|" in pattern:
|
||||
# alternatives = [alt.strip() for alt in pattern.split("|")]
|
||||
# if all(alt.startswith('"') or alt.startswith("'") for alt in alternatives):
|
||||
# return f'{name} {{ {" | ".join(alternatives)} }}'
|
||||
#
|
||||
# return ""
|
||||
#
|
||||
#
|
||||
# def _convert_body(body: str) -> str:
|
||||
# """Convert the body of a Lark rule to Lezer format."""
|
||||
# # Remove inline transformations (-> name)
|
||||
# body = re.sub(r"\s*->\s*\w+", "", body)
|
||||
#
|
||||
# # Convert alternatives
|
||||
# parts = []
|
||||
# for alt in body.split("|"):
|
||||
# alt = alt.strip()
|
||||
# if alt:
|
||||
# converted = _convert_sequence(alt)
|
||||
# if converted:
|
||||
# parts.append(converted)
|
||||
#
|
||||
# return " | ".join(parts)
|
||||
#
|
||||
#
|
||||
# def _convert_sequence(seq: str) -> str:
|
||||
# """Convert a sequence of items in a rule."""
|
||||
# items = []
|
||||
#
|
||||
# # Tokenize the sequence
|
||||
# tokens = re.findall(
|
||||
# r'"[^"]*"|\'[^\']*\'|/[^/]+/|\([^)]+\)|\[[^\]]+\]|[a-zA-Z_][a-zA-Z0-9_]*|\?|\*|\+',
|
||||
# seq
|
||||
# )
|
||||
#
|
||||
# for token in tokens:
|
||||
# if token.startswith('"') or token.startswith("'"):
|
||||
# # String literal
|
||||
# items.append(token)
|
||||
# elif token.startswith("("):
|
||||
# # Group
|
||||
# inner = token[1:-1]
|
||||
# items.append(f"({_convert_body(inner)})")
|
||||
# elif token.startswith("["):
|
||||
# # Optional group in Lark
|
||||
# inner = token[1:-1]
|
||||
# items.append(f"({_convert_body(inner)})?")
|
||||
# elif token in ("?", "*", "+"):
|
||||
# # Quantifiers - attach to previous item
|
||||
# if items:
|
||||
# items[-1] = items[-1] + token
|
||||
# elif token.isupper() or token.startswith("_"):
|
||||
# # Terminal reference
|
||||
# items.append(token)
|
||||
# elif token.islower() or "_" in token:
|
||||
# # Rule reference - convert to PascalCase
|
||||
# items.append(_to_pascal_case(token))
|
||||
#
|
||||
# return " ".join(items)
|
||||
#
|
||||
#
|
||||
# def _to_pascal_case(name: str) -> str:
|
||||
# """Convert snake_case to PascalCase."""
|
||||
# return "".join(word.capitalize() for word in name.split("_"))
|
||||
#
|
||||
#
|
||||
# def extract_completions_from_grammar(lark_grammar: str) -> Dict[str, List[str]]:
|
||||
# """
|
||||
# Extract completion items from a Lark grammar.
|
||||
#
|
||||
# Parses the grammar to find:
|
||||
# - Keywords (reserved words like if, not, and)
|
||||
# - Operators (==, !=, contains, etc.)
|
||||
# - Functions (style, format, etc.)
|
||||
# - Types (number, date, boolean, etc.)
|
||||
# - Literals (True, False, etc.)
|
||||
#
|
||||
# Args:
|
||||
# lark_grammar: The Lark grammar string.
|
||||
#
|
||||
# Returns:
|
||||
# Dictionary with completion categories.
|
||||
# """
|
||||
# keywords: Set[str] = set()
|
||||
# operators: Set[str] = set()
|
||||
# functions: Set[str] = set()
|
||||
# types: Set[str] = set()
|
||||
# literals: Set[str] = set()
|
||||
#
|
||||
# # Find all quoted strings (potential keywords/operators)
|
||||
# quoted_strings = re.findall(r'"([^"]+)"', lark_grammar)
|
||||
#
|
||||
# # Also look for terminal definitions with string alternatives (e.g., BOOLEAN: "True" | "False")
|
||||
# terminal_literals = re.findall(r'[A-Z_]+:\s*"([^"]+)"(?:\s*\|\s*"([^"]+)")*', lark_grammar)
|
||||
# for match in terminal_literals:
|
||||
# for literal in match:
|
||||
# if literal:
|
||||
# quoted_strings.append(literal)
|
||||
#
|
||||
# for s in quoted_strings:
|
||||
# s_lower = s.lower()
|
||||
#
|
||||
# # Classify based on pattern
|
||||
# if s in ("==", "!=", "<=", "<", ">=", ">", "+", "-", "*", "/"):
|
||||
# operators.add(s)
|
||||
# elif s_lower in ("contains", "startswith", "endswith", "in", "between", "isempty", "isnotempty"):
|
||||
# operators.add(s_lower)
|
||||
# elif s_lower in ("if", "not", "and", "or"):
|
||||
# keywords.add(s_lower)
|
||||
# elif s_lower in ("true", "false"):
|
||||
# literals.add(s)
|
||||
# elif s_lower in ("style", "format"):
|
||||
# functions.add(s_lower)
|
||||
# elif s_lower in ("column", "row", "cell", "value", "col"):
|
||||
# keywords.add(s_lower)
|
||||
# elif s_lower in ("number", "date", "boolean", "text", "enum"):
|
||||
# types.add(s_lower)
|
||||
# elif s_lower == "case":
|
||||
# keywords.add(s_lower)
|
||||
#
|
||||
# # Find function-like patterns: word "("
|
||||
# function_patterns = re.findall(r'"(\w+)"\s*"?\("', lark_grammar)
|
||||
# for func in function_patterns:
|
||||
# if func.lower() not in ("true", "false"):
|
||||
# functions.add(func.lower())
|
||||
#
|
||||
# # Find type patterns from format_type rule
|
||||
# type_match = re.search(r'format_type\s*:\s*(.+?)(?:\n\n|\Z)', lark_grammar, re.DOTALL)
|
||||
# if type_match:
|
||||
# type_strings = re.findall(r'"(\w+)"', type_match.group(1))
|
||||
# types.update(t.lower() for t in type_strings)
|
||||
#
|
||||
# return {
|
||||
# "keywords": sorted(keywords),
|
||||
# "operators": sorted(operators),
|
||||
# "functions": sorted(functions),
|
||||
# "types": sorted(types),
|
||||
# "literals": sorted(literals),
|
||||
# }
|
||||
|
||||
240
src/myfasthtml/core/dsl/lark_to_simple_mode.py
Normal file
240
src/myfasthtml/core/dsl/lark_to_simple_mode.py
Normal file
@@ -0,0 +1,240 @@
|
||||
"""
|
||||
Utilities for converting Lark grammars to CodeMirror 5 Simple Mode format.
|
||||
|
||||
This module provides functions to extract regex patterns from Lark grammar
|
||||
terminals and generate a CodeMirror Simple Mode configuration for syntax highlighting.
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Dict, List, Any
|
||||
|
||||
|
||||
def lark_to_simple_mode(lark_grammar: str) -> Dict[str, Any]:
    """
    Convert a Lark grammar to CodeMirror 5 Simple Mode configuration.

    Extracts terminal definitions (regex patterns) from the Lark grammar and
    maps them to CodeMirror token classes for syntax highlighting.

    Args:
        lark_grammar: The Lark grammar string.

    Returns:
        Dictionary with Simple Mode configuration:
        {"start": [{"regex": "...", "token": "keyword"}, ...]}
    """
    # Token classes for the terminals we know how to highlight; any other
    # terminal in the grammar is ignored.
    token_class_by_terminal = {
        "QUOTED_STRING": "string",
        "SIGNED_NUMBER": "number",
        "INTEGER": "number",
        "BOOLEAN": "atom",
        "CELL_ID": "variable-3",
        "NAME": "variable",
    }

    # Comments come first so they take priority over every other rule.
    mode_rules: List[Dict[str, Any]] = [{"regex": r"#.*", "token": "comment"}]

    # One alternation matching every keyword literal found in the rules.
    keyword_list = _extract_keywords(lark_grammar)
    if keyword_list:
        alternation = "|".join(re.escape(word) for word in keyword_list)
        mode_rules.append({
            "regex": r"\b(?:" + alternation + r")\b",
            "token": "keyword",
        })

    # Map known terminals to their token classes, in grammar order.
    for terminal_name, lark_pattern in _extract_terminals(lark_grammar).items():
        token_class = token_class_by_terminal.get(terminal_name)
        if token_class is None:
            continue
        js_pattern = _lark_regex_to_js(lark_pattern)
        if js_pattern:
            mode_rules.append({"regex": js_pattern, "token": token_class})

    return {"start": mode_rules}
|
||||
|
||||
|
||||
def _extract_keywords(grammar: str) -> List[str]:
|
||||
"""
|
||||
Extract keyword literals from grammar rules.
|
||||
|
||||
Looks for quoted string literals in rules (e.g., "column", "if", "style").
|
||||
|
||||
Args:
|
||||
grammar: The Lark grammar string.
|
||||
|
||||
Returns:
|
||||
List of keyword strings.
|
||||
"""
|
||||
keywords = set()
|
||||
|
||||
# Match quoted literals in rules (not in terminal definitions)
|
||||
# Pattern: "keyword" but not in lines like: TERMINAL: "pattern"
|
||||
lines = grammar.split("\n")
|
||||
for line in lines:
|
||||
# Skip terminal definitions (uppercase name followed by colon)
|
||||
if re.match(r'\s*[A-Z_]+\s*:', line):
|
||||
continue
|
||||
|
||||
# Skip comments
|
||||
if line.strip().startswith("//") or line.strip().startswith("#"):
|
||||
continue
|
||||
|
||||
# Find quoted strings in rules
|
||||
matches = re.findall(r'"([a-z_]+)"', line)
|
||||
for match in matches:
|
||||
# Filter out regex-like patterns, keep only identifiers
|
||||
if re.match(r'^[a-z_]+$', match):
|
||||
keywords.add(match)
|
||||
|
||||
return sorted(keywords)
|
||||
|
||||
|
||||
def _extract_terminals(grammar: str) -> Dict[str, str]:
|
||||
"""
|
||||
Extract terminal definitions from Lark grammar.
|
||||
|
||||
Args:
|
||||
grammar: The Lark grammar string.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping terminal names to their regex patterns.
|
||||
"""
|
||||
terminals = {}
|
||||
lines = grammar.split("\n")
|
||||
|
||||
for line in lines:
|
||||
# Match terminal definitions: NAME: /regex/ or NAME: "literal"
|
||||
match = re.match(r'\s*([A-Z_]+)\s*:\s*/([^/]+)/', line)
|
||||
if match:
|
||||
name, pattern = match.groups()
|
||||
terminals[name] = pattern
|
||||
continue
|
||||
|
||||
# Match literal alternatives: BOOLEAN: "True" | "False"
|
||||
match = re.match(r'\s*([A-Z_]+)\s*:\s*(.+)', line)
|
||||
if match:
|
||||
name, alternatives = match.groups()
|
||||
# Extract quoted literals
|
||||
literals = re.findall(r'"([^"]+)"', alternatives)
|
||||
if literals:
|
||||
# Build regex alternation
|
||||
pattern = "|".join(re.escape(lit) for lit in literals)
|
||||
terminals[name] = pattern
|
||||
|
||||
return terminals
|
||||
|
||||
|
||||
def _lark_regex_to_js(lark_pattern: str) -> str:
|
||||
"""
|
||||
Convert a Lark regex pattern to JavaScript regex.
|
||||
|
||||
This is a simplified converter that handles common patterns.
|
||||
Complex patterns may need manual adjustment.
|
||||
|
||||
Args:
|
||||
lark_pattern: Lark regex pattern.
|
||||
|
||||
Returns:
|
||||
JavaScript regex pattern string, or empty string if conversion fails.
|
||||
"""
|
||||
# Remove Lark-specific flags
|
||||
pattern = lark_pattern.strip()
|
||||
|
||||
# Handle common patterns
|
||||
conversions = [
|
||||
# Escape sequences
|
||||
(r'\[', r'['),
|
||||
(r'\]', r']'),
|
||||
|
||||
# Character classes are mostly compatible
|
||||
# Numbers: [0-9]+ or \d+
|
||||
# Letters: [a-zA-Z]
|
||||
# Whitespace: [ \t]
|
||||
]
|
||||
|
||||
result = pattern
|
||||
for lark_pat, js_pat in conversions:
|
||||
result = result.replace(lark_pat, js_pat)
|
||||
|
||||
# Wrap in word boundaries for identifier-like patterns
|
||||
# Example: [a-zA-Z_][a-zA-Z0-9_]* → \b[a-zA-Z_][a-zA-Z0-9_]*\b
|
||||
if re.match(r'\[[a-zA-Z_]+\]', result):
|
||||
result = r'\b' + result + r'\b'
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def generate_formatting_dsl_mode() -> Dict[str, Any]:
    """
    Generate Simple Mode configuration for the Formatting DSL.

    This is a specialized version with hand-tuned rules for better
    highlighting. Rules are listed in priority order — CodeMirror tries
    them top to bottom, so comments beat keywords and keywords beat
    plain identifiers.

    Returns:
        Simple Mode configuration dictionary ({"start": [rule, ...]}).
    """
    # (pattern, token-class) pairs, highest priority first.
    rule_table = [
        # Comments (highest priority)
        (r"#.*", "comment"),
        # Scope keywords
        (r"\b(?:column|row|cell)\b", "keyword"),
        # Condition keywords
        (r"\b(?:if|not|and|or|in|between|case)\b", "keyword"),
        # Built-in functions
        (r"\b(?:style|format)\b", "builtin"),
        # Format types
        (r"\b(?:number|date|boolean|text|enum)\b", "builtin"),
        # String operators (word-like)
        (r"\b(?:contains|startswith|endswith|isempty|isnotempty)\b", "operator"),
        # Comparison operators (symbols)
        (r"==|!=|<=|>=|<|>", "operator"),
        # Special references
        (r"\b(?:value|col|row|cell)\b", "variable-2"),
        # Booleans
        (r"\b(?:True|False|true|false)\b", "atom"),
        # Numbers (integers and floats, with optional sign)
        (r"[+-]?\b\d+(?:\.\d+)?\b", "number"),
        # Strings (double or single quoted)
        (r'"(?:[^\\"]|\\.)*"', "string"),
        (r"'(?:[^\\']|\\.)*'", "string"),
        # Cell IDs
        (r"\btcell_[a-zA-Z0-9_-]+\b", "variable-3"),
        # Names (identifiers) - lowest priority
        (r"\b[a-zA-Z_][a-zA-Z0-9_]*\b", "variable"),
    ]
    return {"start": [{"regex": rx, "token": tok} for rx, tok in rule_table]}
|
||||
@@ -5,6 +5,9 @@ Provides the Lark grammar and derived completions for the
|
||||
DataGrid Formatting DSL.
|
||||
"""
|
||||
|
||||
from functools import cached_property
|
||||
from typing import Dict, Any
|
||||
|
||||
from myfasthtml.core.dsl.base import DSLDefinition
|
||||
from myfasthtml.core.formatting.dsl.grammar import GRAMMAR
|
||||
|
||||
@@ -15,9 +18,20 @@ class FormattingDSL(DSLDefinition):
|
||||
|
||||
Uses the existing Lark grammar from grammar.py.
|
||||
"""
|
||||
|
||||
|
||||
name: str = "Formatting DSL"
|
||||
|
||||
|
||||
    def get_grammar(self) -> str:
        """Return the Lark grammar for the formatting DSL.

        The grammar text is the module-level GRAMMAR constant imported from
        myfasthtml.core.formatting.dsl.grammar; this method simply exposes
        it through the DSLDefinition interface.
        """
        return GRAMMAR
|
||||
|
||||
    @cached_property
    def simple_mode_config(self) -> Dict[str, Any]:
        """
        Return hand-tuned Simple Mode configuration for optimal highlighting.

        Overrides the base class to use a specialized configuration
        rather than auto-generated one. Cached after the first access
        (cached_property), so the mode dict is built only once per instance.
        """
        # Imported locally rather than at module level — NOTE(review):
        # presumably to avoid a circular import; confirm against the
        # module import graph before moving it to the top of the file.
        from myfasthtml.core.dsl.lark_to_simple_mode import generate_formatting_dsl_mode
        return generate_formatting_dsl_mode()
|
||||
|
||||
125
src/myfasthtml/examples/dsl_syntax.py
Normal file
125
src/myfasthtml/examples/dsl_syntax.py
Normal file
@@ -0,0 +1,125 @@
|
||||
"""
|
||||
Test page for DSL syntax highlighting with DaisyUI theme.
|
||||
"""
|
||||
|
||||
from fasthtml.common import *
|
||||
from myfasthtml.myfastapp import create_app
|
||||
from myfasthtml.controls.DslEditor import DslEditor, DslEditorConf
|
||||
from myfasthtml.core.formatting.dsl.definition import FormattingDSL
|
||||
from myfasthtml.core.instances import RootInstance
|
||||
|
||||
# Create app
|
||||
app, rt = create_app(protect_routes=False)
|
||||
|
||||
# Sample DSL content
|
||||
SAMPLE_DSL = """# DataGrid Formatting Example
|
||||
|
||||
column amount:
|
||||
format.number(precision=2, suffix=" €", thousands_sep=" ")
|
||||
style("error") if value < 0
|
||||
style("success", bold=True) if value > 10000
|
||||
|
||||
column status:
|
||||
format.enum(source={"draft": "Draft", "pending": "Pending", "approved": "Approved"})
|
||||
style("neutral") if value == "draft"
|
||||
style("warning") if value == "pending"
|
||||
style("success") if value == "approved"
|
||||
|
||||
column progress:
|
||||
format("percentage")
|
||||
style("error") if value < 0.5
|
||||
style("warning") if value between 0.5 and 0.8
|
||||
style("success") if value > 0.8
|
||||
|
||||
row 0:
|
||||
style("neutral", bold=True)
|
||||
|
||||
cell (amount, 10):
|
||||
style("accent", bold=True)
|
||||
"""
|
||||
|
||||
|
||||
@rt("/")
|
||||
def get():
|
||||
root = RootInstance
|
||||
formatting_dsl = FormattingDSL()
|
||||
|
||||
editor = DslEditor(
|
||||
root,
|
||||
formatting_dsl,
|
||||
DslEditorConf(
|
||||
name="test_editor",
|
||||
line_numbers=True,
|
||||
autocompletion=True,
|
||||
linting=True,
|
||||
placeholder="Type your DSL code here..."
|
||||
),
|
||||
save_state=False
|
||||
)
|
||||
|
||||
editor.set_content(SAMPLE_DSL)
|
||||
|
||||
return Titled(
|
||||
"DSL Syntax Highlighting Test",
|
||||
Div(
|
||||
H1("Formatting DSL Editor", cls="text-3xl font-bold mb-4"),
|
||||
P("This editor demonstrates:", cls="mb-2"),
|
||||
Ul(
|
||||
Li("✅ DaisyUI theme integration (adapts to dark mode)"),
|
||||
Li("✅ Syntax highlighting with CodeMirror Simple Mode"),
|
||||
Li("✅ Server-side validation (try adding syntax errors)"),
|
||||
Li("✅ Server-side autocompletion (Ctrl+Space)"),
|
||||
cls="list-disc list-inside mb-4 space-y-1"
|
||||
),
|
||||
Div(
|
||||
editor,
|
||||
cls="border border-base-300 rounded-lg p-4"
|
||||
),
|
||||
Div(
|
||||
P("Theme:", cls="font-bold mb-2"),
|
||||
Select(
|
||||
Option("light", value="light"),
|
||||
Option("dark", value="dark"),
|
||||
Option("cupcake", value="cupcake"),
|
||||
Option("bumblebee", value="bumblebee"),
|
||||
Option("emerald", value="emerald"),
|
||||
Option("corporate", value="corporate"),
|
||||
Option("synthwave", value="synthwave"),
|
||||
Option("retro", value="retro"),
|
||||
Option("cyberpunk", value="cyberpunk"),
|
||||
Option("valentine", value="valentine"),
|
||||
Option("halloween", value="halloween"),
|
||||
Option("garden", value="garden"),
|
||||
Option("forest", value="forest"),
|
||||
Option("aqua", value="aqua"),
|
||||
Option("lofi", value="lofi"),
|
||||
Option("pastel", value="pastel"),
|
||||
Option("fantasy", value="fantasy"),
|
||||
Option("wireframe", value="wireframe"),
|
||||
Option("black", value="black"),
|
||||
Option("luxury", value="luxury"),
|
||||
Option("dracula", value="dracula"),
|
||||
Option("cmyk", value="cmyk"),
|
||||
Option("autumn", value="autumn"),
|
||||
Option("business", value="business"),
|
||||
Option("acid", value="acid"),
|
||||
Option("lemonade", value="lemonade"),
|
||||
Option("night", value="night"),
|
||||
Option("coffee", value="coffee"),
|
||||
Option("winter", value="winter"),
|
||||
Option("dim", value="dim"),
|
||||
Option("nord", value="nord"),
|
||||
Option("sunset", value="sunset"),
|
||||
cls="select select-bordered",
|
||||
onchange="document.documentElement.setAttribute('data-theme', this.value)"
|
||||
),
|
||||
cls="mt-4"
|
||||
),
|
||||
cls="container mx-auto p-8"
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run(app, host="0.0.0.0", port=5010)
|
||||
@@ -82,12 +82,13 @@ def create_app(daisyui: Optional[bool] = True,
|
||||
hdrs += [
|
||||
Script(src="/myfasthtml/codemirror.min.js"),
|
||||
Link(href="/myfasthtml/codemirror.min.css", rel="stylesheet", type="text/css"),
|
||||
|
||||
|
||||
Script(src="/myfasthtml/placeholder.min.js"),
|
||||
|
||||
Script(src="/myfasthtml/simple.min.js"),
|
||||
|
||||
Script(src="/myfasthtml/show-hint.min.js"),
|
||||
Link(href="/myfasthtml/show-hint.min.css", rel="stylesheet", type="text/css"),
|
||||
|
||||
|
||||
Script(src="/myfasthtml/lint.min.js"),
|
||||
Link(href="/myfasthtml/lint.min.css", rel="stylesheet", type="text/css"),
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user