Add soft keywords (GH-20370)
These are like keywords but they only work in context; they are not reserved except when there is an exact match.

This would enable things like match statements without reserving `match` (which would be bad for the `re.match()` function and probably lots of other places).
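
In the grammar notation used by the tests in this commit, the distinction is carried by the quoting style: a single-quoted literal such as 'foo' is still turned into a regular (reserved) keyword, while a double-quoted literal such as "foo" becomes a soft keyword that matches only when the next token is a NAME with exactly that text (see the `visit_StringLeaf` change in Tools/peg_generator/pegen/c_generator.py below).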

Automerge-Triggered-By: @gvanrossum
gvanrossum committed May 26, 2020
1 parent 578c395 commit b45af1a
Showing 4 changed files with 75 additions and 4 deletions.
30 changes: 30 additions & 0 deletions Lib/test/test_peg_generator/test_c_parser.py
@@ -402,3 +402,33 @@ def test_error_in_rules(self) -> None:
            parse.parse_string("a", mode=0)
        """
        self.run_test(grammar_source, test_source)

    def test_no_soft_keywords(self) -> None:
        grammar_source = """
        start: expr+ NEWLINE? ENDMARKER
        expr: 'foo'
        """
        grammar = parse_string(grammar_source, GrammarParser)
        parser_source = generate_c_parser_source(grammar)
        assert "expect_soft_keyword" not in parser_source

    def test_soft_keywords(self) -> None:
        grammar_source = """
        start: expr+ NEWLINE? ENDMARKER
        expr: "foo"
        """
        grammar = parse_string(grammar_source, GrammarParser)
        parser_source = generate_c_parser_source(grammar)
        assert "expect_soft_keyword" in parser_source

    def test_soft_keywords_parse(self) -> None:
        grammar_source = """
        start: "if" expr '+' expr NEWLINE
        expr: NAME
        """
        test_source = """
        valid_cases = ["if if + if"]
        invalid_cases = ["if if"]
        self.check_input_strings_for_grammar(valid_cases, invalid_cases)
        """
        self.run_test(grammar_source, test_source)
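
The last test exercises the feature end to end: with the grammar `start: "if" expr '+' expr NEWLINE`, the input "if if + if" parses because the first `if` matches the soft keyword while the other two are plain NAME tokens consumed by `expr`, whereas "if if" fails because the required `'+' expr` part never appears.
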
24 changes: 24 additions & 0 deletions Parser/pegen/pegen.c
@@ -753,6 +753,30 @@ _PyPegen_expect_token(Parser *p, int type)
    return t;
}

expr_ty
_PyPegen_expect_soft_keyword(Parser *p, const char *keyword)
{
    if (p->mark == p->fill) {
        if (_PyPegen_fill_token(p) < 0) {
            p->error_indicator = 1;
            return NULL;
        }
    }
    Token *t = p->tokens[p->mark];
    if (t->type != NAME) {
        return NULL;
    }
    char* s = PyBytes_AsString(t->bytes);
    if (!s) {
        return NULL;
    }
    if (strcmp(s, keyword) != 0) {
        return NULL;
    }
    expr_ty res = _PyPegen_name_token(p);
    return res;
}

Token *
_PyPegen_get_last_nonnwhitespace_token(Parser *p)
{
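
The helper never reports an error on a mismatch: it peeks at the next token, and only if that token is a NAME whose text equals the keyword does it consume it via `_PyPegen_name_token()` and return the resulting name node; otherwise it returns NULL without advancing, so other alternatives can still be tried. A minimal sketch of a hand-written caller, assuming the declarations from Parser/pegen/pegen.h (the wrapper `match_soft_keyword` is illustrative and not part of this commit):

#include "pegen.h"  /* Parser, expr_ty, _PyPegen_* declarations */

/* Illustrative only: succeed when the next token is the NAME "match",
   consuming it and returning a name node; return NULL without consuming
   anything otherwise, so the caller can fall back to other alternatives. */
static expr_ty
match_soft_keyword(Parser *p)
{
    expr_ty keyword = _PyPegen_expect_soft_keyword(p, "match");
    if (keyword == NULL) {
        return NULL;  /* next token is not the NAME "match" */
    }
    return keyword;  /* a NAME node built by _PyPegen_name_token() */
}
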
1 change: 1 addition & 0 deletions Parser/pegen/pegen.h
@@ -122,6 +122,7 @@ int _PyPegen_lookahead_with_int(int, Token *(func)(Parser *, int), Parser *, int
int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *);

Token *_PyPegen_expect_token(Parser *p, int type);
expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword);
Token *_PyPegen_get_last_nonnwhitespace_token(Parser *);
int _PyPegen_fill_token(Parser *p);
expr_ty _PyPegen_name_token(Parser *p);
24 changes: 20 additions & 4 deletions Tools/peg_generator/pegen/c_generator.py
@@ -117,6 +117,16 @@ def keyword_helper(self, keyword: str) -> FunctionCall:
            comment=f"token='{keyword}'",
        )

    def soft_keyword_helper(self, value: str) -> FunctionCall:
        return FunctionCall(
            assigned_variable="_keyword",
            function="_PyPegen_expect_soft_keyword",
            arguments=["p", value],
            return_type="expr_ty",
            nodetype=NodeTypes.NAME_TOKEN,
            comment=f"soft_keyword='{value}'",
        )

    def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall:
        name = node.value
        if name in self.non_exact_tokens:
@@ -154,7 +164,10 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall:
    def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall:
        val = ast.literal_eval(node.value)
        if re.match(r"[a-zA-Z_]\w*\Z", val): # This is a keyword
            return self.keyword_helper(val)
            if node.value.endswith("'"):
                return self.keyword_helper(val)
            else:
                return self.soft_keyword_helper(node.value)
        else:
            assert val in self.exact_tokens, f"{node.value} is not a known literal"
            type = self.exact_tokens[val]
Expand Down Expand Up @@ -656,8 +669,9 @@ def handle_alt_normal(self, node: Alt, is_gather: bool, rulename: Optional[str])
        self.print("{")
        # We have parsed successfully all the conditions for the option.
        with self.indent():
            node_str = str(node).replace('"', '\\"')
            self.print(
                f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node}"));'
                f'D(fprintf(stderr, "%*c+ {rulename}[%d-%d]: %s succeeded!\\n", p->level, \' \', _mark, p->mark, "{node_str}"));'
            )
            # Prepare to emmit the rule action and do so
            if node.action and "EXTRA" in node.action:
@@ -710,8 +724,9 @@ def visit_Alt(
        self.print(f"{{ // {node}")
        with self.indent():
            self._check_for_errors()
            node_str = str(node).replace('"', '\\"')
            self.print(
                f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node}"));'
                f'D(fprintf(stderr, "%*c> {rulename}[%d-%d]: %s\\n", p->level, \' \', _mark, p->mark, "{node_str}"));'
            )
            # Prepare variable declarations for the alternative
            vars = self.collect_vars(node)
@@ -733,9 +748,10 @@ def visit_Alt(
                self.handle_alt_normal(node, is_gather, rulename)

            self.print("p->mark = _mark;")
            node_str = str(node).replace('"', '\\"')
            self.print(
                f"D(fprintf(stderr, \"%*c%s {rulename}[%d-%d]: %s failed!\\n\", p->level, ' ',\n"
                f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node}"));'
                f' p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "{node_str}"));'
            )
            if "_cut_var" in vars:
                self.print("if (_cut_var) {")
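
On the generator side, `soft_keyword_helper` describes a call of the form `_keyword = _PyPegen_expect_soft_keyword(p, "foo")`, which yields an `expr_ty` name node rather than a `Token *`, and `visit_StringLeaf` now dispatches keyword-like string literals on their quoting: single-quoted literals keep going through `keyword_helper`, double-quoted ones go through `soft_keyword_helper`. The added `node_str = str(node).replace('"', '\\"')` lines escape double quotes before the alternative text is embedded in the generated debug fprintf() strings, which matters now that an alternative can itself contain a double-quoted soft keyword.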
