Don't parse 5.attr or :foo.attr as symbols

This commit is contained in:
Kodi Arfer 2017-06-16 13:49:10 -07:00
parent c8736ebd0c
commit 26d1b3f72e
4 changed files with 39 additions and 6 deletions

5
NEWS
View File

@ -3,6 +3,11 @@ Changes from 0.13.0
[ Language Changes ]
* Single-character "sharp macros" changed to "tag macros", which can have
longer names
* Periods are no longer allowed in keywords
[ Bug Fixes ]
* Numeric literals are no longer parsed as symbols when followed by a dot
and a symbol
Changes from 0.12.1

View File

@ -288,6 +288,24 @@ def t_partial_string(p):
def t_identifier(p):
    """Lex an identifier token into a number, keyword, or symbol node.

    A dotted identifier whose head would itself parse as a number or
    keyword (e.g. ``5.attr`` or ``:foo.attr``) is rejected, since dotted
    attribute access is only meaningful on plain names.
    """
    text = p[0].value
    parsed = symbol_like(text)
    if parsed is not None:
        # The whole token is a number or keyword — return it directly.
        return parsed
    if "." in text and symbol_like(text.split(".", 1)[0]) is not None:
        # E.g., `5.attr` or `:foo.attr`
        raise LexException(
            'Cannot access attribute on anything other than a name (in '
            'order to get attributes of expressions, use '
            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
            p[0].source_pos.lineno, p[0].source_pos.colno)
    mangled = [hy_symbol_mangle(part) for part in text.split(".")]
    return HySymbol(".".join(mangled))
def symbol_like(obj):
"Try to interpret `obj` as a number or keyword."
try:
return HyInteger(obj)
except ValueError:
@ -312,13 +330,9 @@ def t_identifier(p):
except ValueError:
pass
if obj.startswith(":"):
if obj.startswith(":") and "." not in obj:
return HyKeyword(obj)
obj = ".".join([hy_symbol_mangle(part) for part in obj.split(".")])
return HySymbol(obj)
@pg.error
def error_handler(token):

View File

@ -1446,7 +1446,6 @@
(assert (= (keyword 'foo) :foo))
(assert (= (keyword 'foo-bar) :foo-bar))
(assert (= (keyword 1) :1))
(assert (= (keyword 1.0) :1.0))
(assert (= (keyword :foo_bar) :foo-bar)))
(defn test-name-conversion []

View File

@ -129,6 +129,21 @@ def test_lex_digit_separators():
[HySymbol(",,,,___,__1__,,__,,2__,q,__")])
def test_lex_bad_attrs():
    """Dotted identifiers whose head is a number or keyword must not lex.

    Each ``True`` case is expected to raise a lex error; each ``False``
    case must tokenize successfully. Cases are checked in the original
    order.
    """
    cases = [
        (True, "1.foo"),
        (True, "0.foo"),
        (True, "1.5.foo"),
        (True, "1e3.foo"),
        (True, "5j.foo"),
        (True, "3+5j.foo"),
        (True, "3.1+5.1j.foo"),
        (False, "j.foo"),
        (True, "3/4.foo"),
        (False, "a/1.foo"),
        (False, "1/a.foo"),
        (True, ":hello.foo"),
    ]
    for should_fail, text in cases:
        if should_fail:
            with lexe():
                tokenize(text)
        else:
            assert tokenize(text)
def test_lex_line_counting():
""" Make sure we can count lines / columns """
entry = tokenize("(foo (one two))")[0]