From 26d1b3f72e7358eefd35251410681ab2894fb7e3 Mon Sep 17 00:00:00 2001
From: Kodi Arfer
Date: Fri, 16 Jun 2017 13:49:10 -0700
Subject: [PATCH] Don't parse 5.attr or :foo.attr as symbols

---
 NEWS                           |  5 +++++
 hy/lex/parser.py               | 24 +++++++++++++++++++-----
 tests/native_tests/language.hy |  1 -
 tests/test_lex.py              | 15 +++++++++++++++
 4 files changed, 39 insertions(+), 6 deletions(-)

diff --git a/NEWS b/NEWS
index e96cd69..1a6c3d5 100644
--- a/NEWS
+++ b/NEWS
@@ -3,6 +3,11 @@ Changes from 0.13.0
    [ Language Changes ]
    * Single-character "sharp macros" changed to "tag macros", which can have
      longer names
+   * Periods are no longer allowed in keywords
+
+   [ Bug Fixes ]
+   * Numeric literals are no longer parsed as symbols when followed by a dot
+     and a symbol
 
 Changes from 0.12.1
 
diff --git a/hy/lex/parser.py b/hy/lex/parser.py
index aa579e3..1be896b 100755
--- a/hy/lex/parser.py
+++ b/hy/lex/parser.py
@@ -288,6 +288,24 @@ def t_partial_string(p):
 def t_identifier(p):
     obj = p[0].value
 
+    val = symbol_like(obj)
+    if val is not None:
+        return val
+
+    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
+        # E.g., `5.attr` or `:foo.attr`
+        raise LexException(
+            'Cannot access attribute on anything other than a name (in '
+            'order to get attributes of expressions, use '
+            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
+            p[0].source_pos.lineno, p[0].source_pos.colno)
+
+    return HySymbol(".".join(hy_symbol_mangle(x) for x in obj.split(".")))
+
+
+def symbol_like(obj):
+    "Try to interpret `obj` as a number or keyword."
+
     try:
         return HyInteger(obj)
     except ValueError:
@@ -312,13 +330,9 @@ def t_identifier(p):
         except ValueError:
             pass
 
-    if obj.startswith(":"):
+    if obj.startswith(":") and "." not in obj:
         return HyKeyword(obj)
 
-    obj = ".".join([hy_symbol_mangle(part) for part in obj.split(".")])
-
-    return HySymbol(obj)
-
 
 @pg.error
 def error_handler(token):
diff --git a/tests/native_tests/language.hy b/tests/native_tests/language.hy
index 49b5305..5b2421f 100644
--- a/tests/native_tests/language.hy
+++ b/tests/native_tests/language.hy
@@ -1446,7 +1446,6 @@
   (assert (= (keyword 'foo) :foo))
   (assert (= (keyword 'foo-bar) :foo-bar))
   (assert (= (keyword 1) :1))
-  (assert (= (keyword 1.0) :1.0))
   (assert (= (keyword :foo_bar) :foo-bar)))
 
 (defn test-name-conversion []
diff --git a/tests/test_lex.py b/tests/test_lex.py
index de49076..e5f4322 100644
--- a/tests/test_lex.py
+++ b/tests/test_lex.py
@@ -129,6 +129,21 @@ def test_lex_digit_separators():
             [HySymbol(",,,,___,__1__,,__,,2__,q,__")])
 
 
+def test_lex_bad_attrs():
+    with lexe(): tokenize("1.foo")
+    with lexe(): tokenize("0.foo")
+    with lexe(): tokenize("1.5.foo")
+    with lexe(): tokenize("1e3.foo")
+    with lexe(): tokenize("5j.foo")
+    with lexe(): tokenize("3+5j.foo")
+    with lexe(): tokenize("3.1+5.1j.foo")
+    assert tokenize("j.foo")
+    with lexe(): tokenize("3/4.foo")
+    assert tokenize("a/1.foo")
+    assert tokenize("1/a.foo")
+    with lexe(): tokenize(":hello.foo")
+
+
 def test_lex_line_counting():
     """ Make sure we can count lines / columns """
     entry = tokenize("(foo (one two))")[0]
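
A minimal sketch of how the new lexer behavior can be exercised from plain
Python, assuming the hy 0.13-era API used by tests/test_lex.py above
(`tokenize` and `LexException` importable from `hy.lex`); exact reprs and
messages may differ:

    from hy.lex import tokenize, LexException

    # A dotted name is still lexed as a single symbol,
    # e.g. [HySymbol('foo.bar')].
    print(tokenize("foo.bar"))

    # Numeric literals and keywords followed by `.attr` are now rejected
    # instead of being silently lexed as symbols.
    for src in ("5.attr", "1.5.foo", ":hello.foo"):
        try:
            tokenize(src)
            print("unexpectedly accepted:", src)
        except LexException:
            print("rejected:", src)

As the error message in the patch says, attributes of expressions are instead
reached with `(. <expression> <attr>)` or `(.<attr> <expression>)` in Hy
source.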