From c8736ebd0c0268f099475b0595a02a1ff361a030 Mon Sep 17 00:00:00 2001
From: Kodi Arfer
Date: Tue, 9 May 2017 18:28:56 -0400
Subject: [PATCH 1/2] Use pytest.raises in test_lex

---
 Makefile          |  2 +-
 tests/test_lex.py | 49 ++++++++++++-----------------------------------
 2 files changed, 13 insertions(+), 38 deletions(-)

diff --git a/Makefile b/Makefile
index 1b618a7..4658b6f 100644
--- a/Makefile
+++ b/Makefile
@@ -46,7 +46,7 @@ tox: venv
 	tox
 
 flake:
-	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E305
+	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E302,E305,E701
 
 clear:
 	clear
diff --git a/tests/test_lex.py b/tests/test_lex.py
index 33162e7..de49076 100644
--- a/tests/test_lex.py
+++ b/tests/test_lex.py
@@ -5,56 +5,31 @@
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
                        HyString, HyDict, HyList, HySet, HyCons)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
+import pytest
+
+def peoi(): return pytest.raises(PrematureEndOfInput)
+def lexe(): return pytest.raises(LexException)
 
 
 def test_lex_exception():
     """ Ensure tokenize throws a fit on a partial input """
-    try:
-        tokenize("(foo")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("{foo bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(defn foo [bar]")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(foo \"bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
+    with peoi(): tokenize("(foo")
+    with peoi(): tokenize("{foo bar")
+    with peoi(): tokenize("(defn foo [bar]")
+    with peoi(): tokenize("(foo \"bar")
 
 
 def test_unbalanced_exception():
     """Ensure the tokenization fails on unbalanced expressions"""
-    try:
-        tokenize("(bar))")
-        assert True is False
-    except LexException:
-        pass
-
-    try:
-        tokenize("(baz [quux]])")
-        assert True is False
-    except LexException:
-        pass
+    with lexe(): tokenize("(bar))")
+    with lexe(): tokenize("(baz [quux]])")
 
 
 def test_lex_single_quote_err():
     "Ensure tokenizing \"' \" throws a LexException that can be stringified"
     # https://github.com/hylang/hy/issues/1252
-    try:
-        tokenize("' ")
-    except LexException as e:
-        assert "Could not identify the next token" in str(e)
-    else:
-        assert False
+    with lexe() as e: tokenize("' ")
+    assert "Could not identify the next token" in str(e.value)
 
 
 def test_lex_expression_symbols():

From 26d1b3f72e7358eefd35251410681ab2894fb7e3 Mon Sep 17 00:00:00 2001
From: Kodi Arfer
Date: Fri, 16 Jun 2017 13:49:10 -0700
Subject: [PATCH 2/2] Don't parse 5.attr or :foo.attr as symbols

---
 NEWS                           |  5 +++++
 hy/lex/parser.py               | 24 +++++++++++++++++++-----
 tests/native_tests/language.hy |  1 -
 tests/test_lex.py              | 15 +++++++++++++++
 4 files changed, 39 insertions(+), 6 deletions(-)

diff --git a/NEWS b/NEWS
index e96cd69..1a6c3d5 100644
--- a/NEWS
+++ b/NEWS
@@ -3,6 +3,11 @@ Changes from 0.13.0
 
    [ Language Changes ]
    * Single-character "sharp macros" changed to "tag macros", which can have
      longer names
+   * Periods are no longer allowed in keywords
+
+   [ Bug Fixes ]
+   * Numeric literals are no longer parsed as symbols when followed by a dot
+     and a symbol
 
 Changes from 0.12.1

diff --git a/hy/lex/parser.py b/hy/lex/parser.py
index aa579e3..1be896b 100755
--- a/hy/lex/parser.py
+++ b/hy/lex/parser.py
@@ -288,6 +288,24 @@ def t_partial_string(p):
 def t_identifier(p):
     obj = p[0].value
+    val = symbol_like(obj)
+    if val is not None:
+        return val
+
+    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
+        # E.g., `5.attr` or `:foo.attr`
+        raise LexException(
+            'Cannot access attribute on anything other than a name (in '
+            'order to get attributes of expressions, use '
+            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
+            p[0].source_pos.lineno, p[0].source_pos.colno)
+
+    return HySymbol(".".join(hy_symbol_mangle(x) for x in obj.split(".")))
+
+
+def symbol_like(obj):
+    "Try to interpret `obj` as a number or keyword."
+
     try:
         return HyInteger(obj)
     except ValueError:
         pass
@@ -312,13 +330,9 @@ def t_identifier(p):
     except ValueError:
         pass
 
-    if obj.startswith(":"):
+    if obj.startswith(":") and "." not in obj:
         return HyKeyword(obj)
 
-    obj = ".".join([hy_symbol_mangle(part) for part in obj.split(".")])
-
-    return HySymbol(obj)
-
 
 @pg.error
 def error_handler(token):

diff --git a/tests/native_tests/language.hy b/tests/native_tests/language.hy
index 49b5305..5b2421f 100644
--- a/tests/native_tests/language.hy
+++ b/tests/native_tests/language.hy
@@ -1446,7 +1446,6 @@
   (assert (= (keyword 'foo) :foo))
   (assert (= (keyword 'foo-bar) :foo-bar))
   (assert (= (keyword 1) :1))
-  (assert (= (keyword 1.0) :1.0))
   (assert (= (keyword :foo_bar) :foo-bar)))
 
 (defn test-name-conversion []

diff --git a/tests/test_lex.py b/tests/test_lex.py
index de49076..e5f4322 100644
--- a/tests/test_lex.py
+++ b/tests/test_lex.py
@@ -129,6 +129,21 @@ def test_lex_digit_separators():
         [HySymbol(",,,,___,__1__,,__,,2__,q,__")])
 
 
+def test_lex_bad_attrs():
+    with lexe(): tokenize("1.foo")
+    with lexe(): tokenize("0.foo")
+    with lexe(): tokenize("1.5.foo")
+    with lexe(): tokenize("1e3.foo")
+    with lexe(): tokenize("5j.foo")
+    with lexe(): tokenize("3+5j.foo")
+    with lexe(): tokenize("3.1+5.1j.foo")
+    assert tokenize("j.foo")
+    with lexe(): tokenize("3/4.foo")
+    assert tokenize("a/1.foo")
+    assert tokenize("1/a.foo")
+    with lexe(): tokenize(":hello.foo")
+
+
 def test_lex_line_counting():
     """ Make sure we can count lines / columns """
     entry = tokenize("(foo (one two))")[0]
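
A note on the pattern introduced in PATCH 1/2: pytest.raises doubles as a
context manager, and the ExceptionInfo object it yields wraps the raised
exception, which is why the rewritten test asserts against str(e.value)
rather than str(e). A minimal self-contained sketch of the same pattern
(the divide function and the test here are illustrative, not part of the
patch):

    import pytest

    def divide(a, b):
        # Raises ZeroDivisionError when b == 0.
        return a / b

    def test_divide_by_zero():
        # The with-block must raise ZeroDivisionError, or the test fails.
        with pytest.raises(ZeroDivisionError) as excinfo:
            divide(1, 0)
        # excinfo is an ExceptionInfo wrapper; the exception instance
        # itself is excinfo.value.
        assert "division" in str(excinfo.value)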
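On PATCH 2/2: the new symbol_like helper decides that the text before a dot
is "number-like" by trying each numeric constructor in turn and treating a
ValueError as "try the next one". A simplified plain-Python sketch of that
idiom (the name number_like is hypothetical; the real helper also handles
fractions and keywords and returns Hy model objects):

    def number_like(text):
        # Each constructor raises ValueError on input it can't parse,
        # mirroring the try/except chain in symbol_like().
        for ctor in (int, float, complex):
            try:
                return ctor(text)
            except ValueError:
                pass
        return None  # not a number; the caller falls back to a symbol

    # `5.foo` is now rejected because the text before the first dot ("5")
    # is number-like; `a.foo` still lexes as a symbol ("a" is not).
    assert number_like("5") == 5
    assert number_like("1e3") == 1000.0
    assert number_like("5j") == 5j
    assert number_like("a") is None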