D249: Add rules parser
diff --git a/lilybuild/lilybuild/ci_syntax/rules.py b/lilybuild/lilybuild/ci_syntax/rules.py
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/ci_syntax/rules.py
@@ -0,0 +1,142 @@
+
+import re
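+# Tokenizer, parser (shunting yard) and evaluator for CI rule expressions
+# such as:  $VAR_FOO == "xx" && $BAR != null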
+
+# XXX: Since we are stuck with Python 3.9, we can't disable backtracking yet
+# (the re module only gained atomic groups and possessive quantifiers in 3.11).
+# https://docs.python.org/3/library/re.html
+patterns = {
+ 'OP': re.compile(r'\s*(==|\|\||&&|=~|\!=|\!~|\!)'),
+ 'PAREN_LEFT': re.compile(r'\s*\('),
+ 'PAREN_RIGHT': re.compile(r'\s*\)'),
+ 'STR_DOUBLE': re.compile(r'''\s*"((?:[^\\"]|\\.)*)"'''),
+ 'STR_SINGLE': re.compile(r"""\s*'((?:[^\\']|\\.)*)'"""),
+ 'VAR': re.compile(r'\s*\$([A-Za-z_][A-Za-z0-9_]*)'),
+ 'NULL': re.compile(r'\s*null'),
+ 'END': re.compile(r'\s*$'),
+}
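+# NOTE: patterns are tried in insertion order and the first match wins, so
+# within OP the multi-character operators ('!=', '!~') must precede '!'.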
+
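+# Operator table: name -> (arity, implementation).  The regex-match
+# operators =~ and !~ are stubs for now and always evaluate to False.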
+ops = {
+ '==': (2, lambda a, b: a == b),
+ '!=': (2, lambda a, b: a != b),
+ '=~': (2, lambda a, b: False),
+ '!~': (2, lambda a, b: False),
+ '!': (1, lambda a: not a),
+ '&&': (2, lambda a, b: a and b),
+ '||': (2, lambda a, b: a or b),
+}
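+# Precedence values: a higher number binds more tightly.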
+TERM_PRECEDENCE = 9
+PAREN_PRECEDENCE = 8
+
+def is_term(token):
+ return token[0] in ['STR_DOUBLE', 'STR_SINGLE', 'VAR', 'NULL']
+
+def get_precedence(token):
+ if is_term(token):
+ return TERM_PRECEDENCE
+ elif token[0] in ['PAREN_LEFT', 'PAREN_RIGHT']:
+ return PAREN_PRECEDENCE
+ elif token[0] == 'OP' and (token[1][0] in ['==', '!=', '=~', '!~']):
+ return 6
+ elif token[0] == 'OP' and (token[1][0] in ['!']):
+ return 5
+ elif token[0] == 'OP' and (token[1][0] in ['&&']):
+ return 4
+ elif token[0] == 'OP' and (token[1][0] in ['||']):
+ return 3
+ raise SyntaxError('Unknown token')
+
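+# Split rule_str into (TYPE, groups) tuples.  The loop stops once the END
+# pattern matches; the END token itself is stripped from the result.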
+def tokenize_rule(rule_str):
+ pos = 0
+ tokenized = []
+ while not (len(tokenized) and tokenized[-1][0] == 'END'):
+ match = None
+ for t in patterns:
+ regex = patterns[t]
+ match = regex.match(rule_str, pos)
+ if match:
+ tokenized.append((t, match.groups()))
+ pos = match.end()
+ break
+ if not match:
+ raise SyntaxError(f'Bad rule "{rule_str}", at pos {pos}')
+ return tokenized[:-1]
+
+def make_tree(tokenized):
+ # https://en.wikipedia.org/wiki/Shunting_yard_algorithm
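+    # Phase 1: convert the infix token stream to reverse Polish notation
+    # in `res`, keeping pending operators and parentheses on `stack`.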
+ stack = []
+ res = []
+ for t in tokenized:
+ if is_term(t):
+ res.append(t)
+ elif t[0] == 'OP':
+            # Pop all operators of higher or equal precedence
+            # (left-associative) before pushing the new operator.
+            while len(stack) and stack[-1][0] == 'OP' and get_precedence(stack[-1]) >= get_precedence(t):
+                res.append(stack.pop())
+            stack.append(t)
+ elif t[0] == 'PAREN_LEFT':
+ stack.append(t)
+ elif t[0] == 'PAREN_RIGHT':
+ while len(stack) and stack[-1][0] != 'PAREN_LEFT':
+ res.append(stack.pop())
+ if not len(stack):
+ raise SyntaxError('Mismatched parentheses')
+ stack.pop()
+ while len(stack):
+ t = stack.pop()
+ if t[0] == 'PAREN_LEFT':
+ raise SyntaxError('Mismatched parentheses')
+ res.append(t)
+
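+    # Phase 2: fold the RPN sequence into a nested expression tree,
+    # checking operator arity along the way.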
+ stack = []
+ for t in res:
+ if t[0] == 'OP':
+ opname = t[1][0]
+ arity = ops[opname][0]
+ if len(stack) < arity:
+ raise SyntaxError('Missing operands')
+ operands = tuple(reversed([stack.pop() for i in range(arity)]))
+ stack.append((t[0], (opname,) + operands))
+ else:
+ stack.append(t)
+ if len(stack) != 1:
+ raise SyntaxError('Too many operands')
+ return stack[0]
+
+def parse_rule(rule_str):
+ return make_tree(tokenize_rule(rule_str))
+
+# Unescape a quoted string: '\<c>' is replaced by '<c>' for any character,
+# so there are no special escape sequences like '\n'.
+backslash_re = re.compile(r'\\(.)')
+def replace_backslash(match):
+    return match.group(1)
+
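+# Recursively evaluate a parsed expression tree against `variables`.  Note
+# that && and || behave like Python's `and`/`or`: they return one of their
+# operands rather than forcing a boolean.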
+def evaluate_rule(expr, variables):
+ if expr[0] == 'VAR':
+ return variables.get(expr[1][0])
+ elif expr[0] == 'STR_DOUBLE' or expr[0] == 'STR_SINGLE':
+ return backslash_re.sub(replace_backslash, expr[1][0])
+ elif expr[0] == 'NULL':
+ return None
+ elif expr[0] == 'OP':
+ opname = expr[1][0]
+ operands = [evaluate_rule(o, variables) for o in expr[1][1:]]
+ return ops[opname][1](*operands)
+ raise SyntaxError(f'Cannot evaluate expression {expr}')
diff --git a/lilybuild/lilybuild/tests/ci_syntax/rules_test.py b/lilybuild/lilybuild/tests/ci_syntax/rules_test.py
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/tests/ci_syntax/rules_test.py
@@ -0,0 +1,118 @@
+
+import unittest
+from lilybuild.ci_syntax.rules import tokenize_rule, make_tree, parse_rule, evaluate_rule
+
+class RulesTest(unittest.TestCase):
+ def test_tokenize(self):
+ self.assertEqual(tokenize_rule("$VAR_FOO == ''"), [
+ ('VAR', ('VAR_FOO',)),
+ ('OP', ('==',)),
+ ('STR_SINGLE', ('',)),
+ ])
+ self.assertEqual(tokenize_rule(r'$VAR_FOO == "xxx\""'), [
+ ('VAR', ('VAR_FOO',)),
+ ('OP', ('==',)),
+ ('STR_DOUBLE', ('xxx\\"',)),
+ ])
+ self.assertEqual(tokenize_rule(r'!($VAR_FOO == "xxx\"")'), [
+ ('OP', ('!',)),
+ ('PAREN_LEFT', ()),
+ ('VAR', ('VAR_FOO',)),
+ ('OP', ('==',)),
+ ('STR_DOUBLE', ('xxx\\"',)),
+ ('PAREN_RIGHT', ()),
+ ])
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO =/= "foo"')
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO > "foo"')
+        # bareword
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO == foo')
+        # unterminated strings
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO == "foo\"')
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO == "foo')
+        with self.assertRaises(SyntaxError):
+            tokenize_rule(r'$VAR_FOO == "foo\\\"')
+
+ def test_make_tree(self):
+ self.assertEqual(make_tree(tokenize_rule(r'($FOO)')), ('VAR', ('FOO',)))
+ self.assertEqual(make_tree(tokenize_rule(r'($FOO=="foo(") || "xxx"')), (
+ 'OP', (
+ '||',
+ ('OP', ('==', ('VAR', ('FOO',)), ('STR_DOUBLE', ('foo(',)))),
+ ('STR_DOUBLE', ('xxx',)),
+ )
+ ))
+ self.assertEqual(make_tree(tokenize_rule(r'$FOO=="foo("|| "xxx"')), (
+ 'OP', (
+ '||',
+ ('OP', ('==', ('VAR', ('FOO',)), ('STR_DOUBLE', ('foo(',)))),
+ ('STR_DOUBLE', ('xxx',)),
+ )
+ ))
+ self.assertEqual(make_tree(tokenize_rule(r'$FOO==("foo("|| "xxx")')), (
+ 'OP', (
+ '==',
+ ('VAR', ('FOO',)),
+ ('OP', ('||', ('STR_DOUBLE', ('foo(',)), ('STR_DOUBLE', ('xxx',)))),
+ )
+ ))
+ self.assertEqual(make_tree(tokenize_rule(r'!$FOO==("foo("|| "xxx")')), (
+ 'OP', (
+ '!',
+ ('OP', (
+ '==',
+ ('VAR', ('FOO',)),
+ ('OP', ('||', ('STR_DOUBLE', ('foo(',)), ('STR_DOUBLE', ('xxx',)))),
+ ))
+ )
+ ))
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'())'))
+ self.assertEqual(str(m.exception), 'Mismatched parentheses')
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'('))
+ self.assertEqual(str(m.exception), 'Mismatched parentheses')
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'("("))'))
+ self.assertEqual(str(m.exception), 'Mismatched parentheses')
+
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'$FOO== != "0"'))
+ self.assertEqual(str(m.exception), 'Missing operands')
+
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'( == "1" && "0" || "0")'))
+ self.assertEqual(str(m.exception), 'Missing operands')
+
+ with self.assertRaises(SyntaxError) as m:
+ make_tree(tokenize_rule(r'$VAR( $FOO == "0")'))
+ self.assertEqual(str(m.exception), 'Too many operands')
+
+ def test_eval(self):
+ self.assertEqual(evaluate_rule(parse_rule("$VAR_FOO == ''"), {}), False)
+ self.assertEqual(evaluate_rule(parse_rule("$VAR_FOO == null"), {}), True)
+ self.assertEqual(evaluate_rule(parse_rule("$VAR_FOO == 'xx'"), {}), False)
+ self.assertEqual(evaluate_rule(parse_rule("$VAR_FOO == 'xx'"), { 'VAR_FOO': 'xx' }), True)
+ self.assertEqual(
+ evaluate_rule(parse_rule("$VAR_FOO == 'xx' && $BAR"), {
+ 'VAR_FOO': 'xx',
+ 'BAR': 'yyy',
+ }), 'yyy')
+ self.assertEqual(
+ evaluate_rule(parse_rule("$VAR_FOO == 'xx' && !$BAR"), {
+ 'VAR_FOO': 'xx',
+ 'BAR': 'yyy',
+ }), False)
+ self.assertEqual(
+ evaluate_rule(parse_rule("$VAR_FOO == 'xx' && $Z"), {
+ 'VAR_FOO': 'xx',
+ 'BAR': 'yyy',
+ }), None)
+ self.assertEqual(
+ evaluate_rule(parse_rule(r"$VAR_FOO == 'x\a\\\'x'"), {
+ 'VAR_FOO': r"xa\'x",
+ }), True)