diff --git a/src/parsers/DefaultParser.py b/src/parsers/DefaultParser.py
index f6a9405..8feb83f 100644
--- a/src/parsers/DefaultParser.py
+++ b/src/parsers/DefaultParser.py
@@ -312,7 +312,12 @@ class DefaultParser(BaseParser):
         if TokenKind.NEWLINE in [t.type for t in name_tokens]:
             self.add_error(SyntaxErrorNode(tokens_found_by_parts[Keywords.CONCEPT], "Newline are not allowed in name."))
 
-        name_node = NameNode(name_tokens[name_first_token_index:]) # skip the first token
+        tokens = name_tokens[name_first_token_index:]
+        stripped = core.utils.strip_tokens(tokens)
+        if len(stripped) == 1 and stripped[0].type == TokenKind.STRING and stripped[0].value[0] == '"':
+            tokens = list(Tokenizer(stripped[0].strip_quote, yield_eof=False))
+
+        name_node = NameNode(tokens) # skip the first token
         return name_node
 
     def get_concept_definition(self, current_concept_def, tokens_found_by_parts):
diff --git a/tests/parsers/test_DefaultParser.py b/tests/parsers/test_DefaultParser.py
index 8e1e705..0ac7163 100644
--- a/tests/parsers/test_DefaultParser.py
+++ b/tests/parsers/test_DefaultParser.py
@@ -311,6 +311,18 @@ def concept add one to a as
 
         assert not parser.has_error
 
+    @pytest.mark.parametrize("text", [
+        'def concept "def concept x"',
+        'def concept "def concept x" as x',
+    ])
+    def test_i_can_use_double_quotes_to_protect_keywords(self, text):
+        sheerka, context, parser = self.init_parser()
+        res = parser.parse(context, ParserInput(text))
+        concept_defined = res.value.value
+
+        assert res.status
+        assert concept_defined.name.tokens == list(Tokenizer("def concept x", yield_eof=False))
+
     @pytest.mark.parametrize("text", [
         "def concept name from bnf as here is my body",
         "def concept name from def as here is my body",
@@ -380,20 +392,6 @@ def concept add one to a as
         assert context.sheerka.isinstance(res.value, BuiltinConcepts.NOT_FOR_ME)
         assert isinstance(res.value.body[0], CannotHandleErrorNode)
 
-    # def test_i_can_parse_is_a(self):
-    #     text = "the name of my 'concept' isa the name of the set"
-    #     sheerka, context, parser = self.init_parser()
-    #     res = parser.parse(context, ParserInput(text))
-    #     expected = IsaConceptNode([],
-    #                               concept=NameNode(list(Tokenizer("the name of my 'concept'"))),
-    #                               set=NameNode(list(Tokenizer("the name of the set"))))
-    #
-    #     assert res.status
-    #     assert res.who == parser.name
-    #     assert res.value.source == text
-    #     assert isinstance(res.value, ParserResultConcept)
-    #     assert res.value.value == expected
-
     @pytest.mark.parametrize("text", [
         "def",
         "def concept_name"