Switch @ for `this` since it is so common in field names (for example…) · lina1/python-jsonpath-rw@d9d2a09 · GitHub
[go: up one dir, main page]

Skip to content

Commit d9d2a09

Browse files
committed
Switch @ for this since it is so common in field names (for example when JSON is generated from XML)
1 parent 5219ef9 commit d9d2a09

File tree

5 files changed

+44
-18
lines changed

5 files changed

+44
-18
lines changed

jsonpath_rw/jsonpath.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,7 @@ def update(self, data, val):
198198
return val
199199

200200
def __str__(self):
201-
return '@'
201+
return '`this`'
202202
203203
def __repr__(self):
204204
return 'This()'

jsonpath_rw/lexer.py

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -37,21 +37,22 @@ def tokenize(self, string):
3737
#
3838
# Anyhow, it is pythonic to give some rope to hang oneself with :-)
3939

40-
literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&', '@']
40+
literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&']
4141

4242
reserved_words = { 'where': 'WHERE' }
4343

44-
tokens = ['DOUBLEDOT', 'NUMBER', 'ID'] + list(reserved_words.values())
44+
tokens = ['DOUBLEDOT', 'NUMBER', 'ID', 'NAMED_OPERATOR'] + list(reserved_words.values())
4545

4646
states = [ ('singlequote', 'exclusive'),
47-
('doublequote', 'exclusive') ]
47+
('doublequote', 'exclusive'),
48+
('backquote', 'exclusive') ]
4849

4950
# Normal lexing, rather easy
5051
t_DOUBLEDOT = r'\.\.'
5152
t_ignore = ' \t'
5253

5354
def t_ID(self, t):
54-
r'[a-zA-Z_][a-zA-Z0-9_]*'
55+
r'[a-zA-Z_@][a-zA-Z0-9_@]*'
5556
t.type = self.reserved_words.get(t.value, 'ID')
5657
return t
5758

@@ -95,6 +96,25 @@ def t_doublequote_DOUBLEQUOTE(self, t):
9596
def t_doublequote_error(self, t):
9697
raise Exception('Error on line %s, col %s while lexing doublequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.latest_newline, t.value[0]))
9798

99+
100+
# Back-quoted "magic" operators
101+
t_backquote_ignore = ''
102+
def t_BACKQUOTE(self, t):
103+
r'`'
104+
t.lexer.string_start = t.lexer.lexpos
105+
t.lexer.push_state('backquote')
106+
107+
def t_backquote_BACKQUOTE(self, t):
108+
r'([^`]|\\`)*`'
109+
t.value = t.value[:-1]
110+
t.type = 'NAMED_OPERATOR'
111+
t.lexer.pop_state()
112+
return t
113+
114+
def t_backquote_error(self, t):
115+
raise Exception('Error on line %s, col %s while lexing backquoted operator: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.latest_newline, t.value[0]))
116+
117+
98118
# Counting lines, handling errors
99119
def t_newline(self, t):
100120
r'\n'

jsonpath_rw/parser.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -84,9 +84,12 @@ def p_jsonpath_fields(self, p):
8484
"jsonpath : fields_or_any"
8585
p[0] = Fields(*p[1])
8686

87-
def p_jsonpath_this(self, p):
88-
"jsonpath : '@'"
89-
p[0] = This()
87+
def p_jsonpath_named_operator(self, p):
88+
"jsonpath : NAMED_OPERATOR"
89+
if p[1] == 'this':
90+
p[0] = This()
91+
else:
92+
raise Exception('Unknown named operator `%s` at %s:%s' % (t.value, t.lineno, t.col))
9093

9194
def p_jsonpath_root(self, p):
9295
"jsonpath : '$'"

tests/test_jsonpath.py

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -103,10 +103,11 @@ def test_fields_value(self):
103103
jsonpath.auto_id_field = None
104104
self.check_cases([ ('foo', {'foo': 'baz'}, ['baz']),
105105
('foo,baz', {'foo': 1, 'baz': 2}, [1, 2]),
106+
('@foo', {'@foo': 1}, [1]),
106107
('*', {'foo': 1, 'baz': 2}, set([1, 2])) ])
107108

108109
jsonpath.auto_id_field = 'id'
109-
self.check_cases([ ('*', {'foo': 1, 'baz': 2}, set([1, 2, '@'])) ])
110+
self.check_cases([ ('*', {'foo': 1, 'baz': 2}, set([1, 2, '`this`'])) ])
110111

111112
def test_root_value(self):
112113
jsonpath.auto_id_field = None
@@ -119,9 +120,9 @@ def test_root_value(self):
119120
def test_this_value(self):
120121
jsonpath.auto_id_field = None
121122
self.check_cases([
122-
('@', {'foo': 'baz'}, [{'foo':'baz'}]),
123-
('foo.@', {'foo': 'baz'}, ['baz']),
124-
('foo.@.baz', {'foo': {'baz': 3}}, [3]),
123+
('`this`', {'foo': 'baz'}, [{'foo':'baz'}]),
124+
('foo.`this`', {'foo': 'baz'}, ['baz']),
125+
('foo.`this`.baz', {'foo': {'baz': 3}}, [3]),
125126
])
126127

127128
def test_index_value(self):
@@ -194,9 +195,9 @@ def test_root_paths(self):
194195
def test_this_paths(self):
195196
jsonpath.auto_id_field = None
196197
self.check_paths([
197-
('@', {'foo': 'baz'}, ['@']),
198-
('foo.@', {'foo': 'baz'}, ['foo']),
199-
('foo.@.baz', {'foo': {'baz': 3}}, ['foo.baz']),
198+
('`this`', {'foo': 'baz'}, ['`this`']),
199+
('foo.`this`', {'foo': 'baz'}, ['foo']),
200+
('foo.`this`.baz', {'foo': {'baz': 3}}, ['foo.baz']),
200201
])
201202

202203
def test_index_paths(self):
@@ -240,9 +241,9 @@ def test_root_auto_id(self):
240241
def test_this_auto_id(self):
241242
jsonpath.auto_id_field = 'id'
242243
self.check_cases([
243-
('id', {'foo': 'baz'}, ['@']), # This is, again, a wonky case that is not that interesting
244-
('foo.@.id', {'foo': 'baz'}, ['foo']),
245-
('foo.@.baz.id', {'foo': {'baz': 3}}, ['foo.baz']),
244+
('id', {'foo': 'baz'}, ['`this`']), # This is, again, a wonky case that is not that interesting
245+
('foo.`this`.id', {'foo': 'baz'}, ['foo']),
246+
('foo.`this`.baz.id', {'foo': {'baz': 3}}, ['foo.baz']),
246247
])
247248

248249
def test_index_auto_id(self):

tests/test_lexer.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,5 +43,7 @@ def test_simple_inputs(self):
4343
self.assert_lex_equiv('fuzz.*', [self.token('fuzz', 'ID'), self.token('.', '.'), self.token('*', '*')])
4444
self.assert_lex_equiv('fuzz..bang', [self.token('fuzz', 'ID'), self.token('..', 'DOUBLEDOT'), self.token('bang', 'ID')])
4545
self.assert_lex_equiv('&', [self.token('&', '&')])
46+
self.assert_lex_equiv('@', [self.token('@', 'ID')])
47+
self.assert_lex_equiv('`this`', [self.token('this', 'NAMED_OPERATOR')])
4648
self.assert_lex_equiv('|', [self.token('|', '|')])
4749
self.assert_lex_equiv('where', [self.token('where', 'WHERE')])

0 commit comments

Comments (0)