Merge pull request #1296 from Kodiologist/literal-attr

Don't parse 5.attr or :foo.attr as symbols
Kodi Arfer 2017-06-23 08:42:20 -07:00 committed by GitHub
commit a62faf7b8a
5 changed files with 52 additions and 44 deletions
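In short, attribute-style access on a literal is now a lex-time error instead of being silently read as a symbol. A minimal sketch of the intended behavior against the hy.lex API used in the diffs below (the suggested alternatives are quoted from the new error message; exact wording may differ):

    from hy.lex import tokenize, LexException

    # A literal followed by `.attr` is rejected by the lexer...
    try:
        tokenize("5.attr")
    except LexException as e:
        print(e)  # points at `(. <expression> <attr>)` or `(.<attr> <expression>)`

    # ...while the explicit attribute forms still tokenize fine:
    tokenize("(. 5 attr)")
    tokenize("(.attr 5)")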


@@ -46,7 +46,7 @@ tox: venv
 	tox
 
 flake:
-	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E305
+	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E302,E305,E701
 
 clear:
 	clear

NEWS

@@ -3,6 +3,11 @@ Changes from 0.13.0
 [ Language Changes ]
 * Single-character "sharp macros" changed to "tag macros", which can have
   longer names
+* Periods are no longer allowed in keywords
+
+[ Bug Fixes ]
+* Numeric literals are no longer parsed as symbols when followed by a dot
+  and a symbol
 
 Changes from 0.12.1
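To illustrate the two NEWS entries (a sketch, assuming only the tokenize behavior shown in the parser diff below): a keyword-looking token containing a period is now rejected at lex time, while ordinary keywords are unaffected.

    from hy.lex import tokenize, LexException

    tokenize(":foo-bar")       # still lexes as a keyword
    try:
        tokenize(":foo.attr")  # period after a keyword head -> LexException
    except LexException:
        pass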


@@ -288,6 +288,24 @@ def t_partial_string(p):
 def t_identifier(p):
     obj = p[0].value
 
+    val = symbol_like(obj)
+    if val is not None:
+        return val
+
+    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
+        # E.g., `5.attr` or `:foo.attr`
+        raise LexException(
+            'Cannot access attribute on anything other than a name (in '
+            'order to get attributes of expressions, use '
+            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
+            p[0].source_pos.lineno, p[0].source_pos.colno)
+
+    return HySymbol(".".join(hy_symbol_mangle(x) for x in obj.split(".")))
+
+
+def symbol_like(obj):
+    "Try to interpret `obj` as a number or keyword."
+
     try:
         return HyInteger(obj)
     except ValueError:
@@ -312,13 +330,9 @@ def t_identifier(p):
     except ValueError:
         pass
 
-    if obj.startswith(":"):
+    if obj.startswith(":") and "." not in obj:
         return HyKeyword(obj)
 
-    obj = ".".join([hy_symbol_mangle(part) for part in obj.split(".")])
-
-    return HySymbol(obj)
-
 
 @pg.error
 def error_handler(token):
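The net effect of the refactor: t_identifier first asks symbol_like whether the whole token is a number or keyword, then rejects dotted tokens whose head is itself number- or keyword-like, and only then mangles the token into a HySymbol. A rough sketch of the resulting classifications (assuming the hy.models classes used elsewhere in this commit):

    from hy.lex import tokenize, LexException
    from hy.models import HyInteger, HyKeyword, HySymbol

    assert isinstance(tokenize("5")[0], HyInteger)       # whole token is a literal
    assert isinstance(tokenize(":foo")[0], HyKeyword)    # whole token is a keyword
    assert isinstance(tokenize("foo.bar")[0], HySymbol)  # head `foo` is a name, so a dotted symbol

    try:
        tokenize("5.attr")   # head `5` is a literal -> LexException
    except LexException:
        pass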


@@ -1446,7 +1446,6 @@
   (assert (= (keyword 'foo) :foo))
   (assert (= (keyword 'foo-bar) :foo-bar))
   (assert (= (keyword 1) :1))
-  (assert (= (keyword 1.0) :1.0))
   (assert (= (keyword :foo_bar) :foo-bar)))
 
 (defn test-name-conversion []


@@ -5,56 +5,31 @@
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
                        HyString, HyDict, HyList, HySet, HyCons)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
+import pytest
+
+
+def peoi(): return pytest.raises(PrematureEndOfInput)
+def lexe(): return pytest.raises(LexException)
 
 
 def test_lex_exception():
     """ Ensure tokenize throws a fit on a partial input """
-    try:
-        tokenize("(foo")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("{foo bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("(defn foo [bar]")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("(foo \"bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
+    with peoi(): tokenize("(foo")
+    with peoi(): tokenize("{foo bar")
+    with peoi(): tokenize("(defn foo [bar]")
+    with peoi(): tokenize("(foo \"bar")
 
 
 def test_unbalanced_exception():
     """Ensure the tokenization fails on unbalanced expressions"""
-    try:
-        tokenize("(bar))")
-        assert True is False
-    except LexException:
-        pass
-
-    try:
-        tokenize("(baz [quux]])")
-        assert True is False
-    except LexException:
-        pass
+    with lexe(): tokenize("(bar))")
+    with lexe(): tokenize("(baz [quux]])")
 
 
 def test_lex_single_quote_err():
     "Ensure tokenizing \"' \" throws a LexException that can be stringified"
     # https://github.com/hylang/hy/issues/1252
-    try:
-        tokenize("' ")
-    except LexException as e:
-        assert "Could not identify the next token" in str(e)
-    else:
-        assert False
+    with lexe() as e: tokenize("' ")
+    assert "Could not identify the next token" in str(e.value)
 
 
 def test_lex_expression_symbols():
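The rewritten tests lean on pytest.raises as a context manager; peoi() and lexe() just wrap it so each failing input fits on one line. Note that the context manager yields an ExceptionInfo object rather than the exception itself, which is why str(e) became str(e.value). A standalone illustration of the same pattern (hypothetical function names, not from this commit):

    import pytest

    def boom():
        raise ValueError("could not parse input")

    def test_boom():
        with pytest.raises(ValueError) as excinfo:
            boom()
        # excinfo is an ExceptionInfo wrapper; the raised exception is excinfo.value
        assert "could not parse" in str(excinfo.value)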
@@ -154,6 +129,21 @@ def test_lex_digit_separators():
         [HySymbol(",,,,___,__1__,,__,,2__,q,__")])
 
 
+def test_lex_bad_attrs():
+    with lexe(): tokenize("1.foo")
+    with lexe(): tokenize("0.foo")
+    with lexe(): tokenize("1.5.foo")
+    with lexe(): tokenize("1e3.foo")
+    with lexe(): tokenize("5j.foo")
+    with lexe(): tokenize("3+5j.foo")
+    with lexe(): tokenize("3.1+5.1j.foo")
+    assert tokenize("j.foo")
+    with lexe(): tokenize("3/4.foo")
+    assert tokenize("a/1.foo")
+    assert tokenize("1/a.foo")
+    with lexe(): tokenize(":hello.foo")
+
+
 def test_lex_line_counting():
     """ Make sure we can count lines / columns """
     entry = tokenize("(foo (one two))")[0]
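For contrast with the failing cases in test_lex_bad_attrs, dotted tokens whose first component is neither a number nor a keyword (j.foo, a/1.foo, 1/a.foo) still lex as ordinary dotted symbols, since symbol_like returns None for their heads. A small sketch of that expected behavior:

    from hy.lex import tokenize
    from hy.models import HySymbol

    # `j` by itself is a name (the complex literal is `1j`), so `j.foo` is a legal dotted symbol.
    assert isinstance(tokenize("j.foo")[0], HySymbol)
    # Heads like `a/1` and `1/a` are not literals either, so these lex fine too.
    assert isinstance(tokenize("a/1.foo")[0], HySymbol)
    assert isinstance(tokenize("1/a.foo")[0], HySymbol)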