Merge pull request #1296 from Kodiologist/literal-attr
Don't parse 5.attr or :foo.attr as symbols
commit a62faf7b8a
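
In short: attribute syntax after a literal no longer lexes. A minimal sketch of the new behavior, assuming only the public `hy.lex.tokenize` entry point exercised by the tests below (return values are illustrative):

    from hy.lex import LexException, tokenize

    tokenize("foo.attr")        # fine: attribute access on a name
    try:
        tokenize("5.attr")      # previously lexed as the symbol `5.attr`
    except LexException:
        # The lexer now suggests `(. <expression> <attr>)` or
        # `(.<attr> <expression>)` for attributes of expressions.
        pass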
2 Makefile

@@ -46,7 +46,7 @@ tox: venv
 	tox
 
 flake:
-	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E305
+	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E302,E305,E701
 
 clear:
 	clear
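
A note on the new ignore codes: the rewritten tests below deliberately use one-line `def` and `with` statements, which pycodestyle flags as E302 (expected two blank lines between top-level definitions) and E701 (multiple statements on one line). A runnable sample of the style being exempted, with helper names taken from the test diff:

    import pytest
    from hy.lex import LexException, tokenize

    def lexe(): return pytest.raises(LexException)  # E704/E302 territory
    with lexe(): tokenize("(bar))")                 # E701: statement after the colon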
5 NEWS

@@ -3,6 +3,11 @@ Changes from 0.13.0
 [ Language Changes ]
 * Single-character "sharp macros" changed to "tag macros", which can have
   longer names
+* Periods are no longer allowed in keywords
+
+[ Bug Fixes ]
+* Numeric literals are no longer parsed as symbols when followed by a dot
+  and a symbol
 
 Changes from 0.12.1
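
A hedged illustration of the two NEWS entries, using `tokenize` as in the test suite (the keyword behavior follows from the parser hunks below):

    from hy.lex import LexException, tokenize

    tokenize(":foo")            # still a keyword
    try:
        tokenize(":foo.attr")   # a period no longer yields a keyword...
    except LexException:
        pass                    # ...it is treated as attribute access on a literal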
24 hy/lex/parser.py

@@ -288,6 +288,24 @@ def t_partial_string(p):
 def t_identifier(p):
     obj = p[0].value
 
+    val = symbol_like(obj)
+    if val is not None:
+        return val
+
+    if "." in obj and symbol_like(obj.split(".", 1)[0]) is not None:
+        # E.g., `5.attr` or `:foo.attr`
+        raise LexException(
+            'Cannot access attribute on anything other than a name (in '
+            'order to get attributes of expressions, use '
+            '`(. <expression> <attr>)` or `(.<attr> <expression>)`)',
+            p[0].source_pos.lineno, p[0].source_pos.colno)
+
+    return HySymbol(".".join(hy_symbol_mangle(x) for x in obj.split(".")))
+
+
+def symbol_like(obj):
+    "Try to interpret `obj` as a number or keyword."
+
     try:
         return HyInteger(obj)
     except ValueError:

@@ -312,13 +330,9 @@ def t_identifier(p):
     except ValueError:
         pass
 
-    if obj.startswith(":"):
+    if obj.startswith(":") and "." not in obj:
         return HyKeyword(obj)
 
-    obj = ".".join([hy_symbol_mangle(part) for part in obj.split(".")])
-
-    return HySymbol(obj)
-
 
 @pg.error
 def error_handler(token):
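
Distilled from the two hunks above, a standalone sketch of the new gate; the function name `rejects_attr_access` is hypothetical, and in the real parser this logic lives inline in `t_identifier` with `symbol_like` playing the literal-detector role:

    def rejects_attr_access(obj, symbol_like):
        # True when `obj` looks like `<literal>.<attr>` (e.g. `5.attr` or
        # `:foo.attr`), which the lexer must now refuse.
        if symbol_like(obj) is not None:
            return False                  # whole token is a literal: no attribute at all
        head = obj.split(".", 1)[0]
        # "5.attr" -> head "5" is a literal; "foo.attr" -> head "foo" is not
        return "." in obj and symbol_like(head) is not None

With this gate, `foo.attr` still mangles into a `HySymbol`, while `5.attr` raises before any symbol is built.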
1 tests/native_tests/language.hy

@@ -1446,7 +1446,6 @@
   (assert (= (keyword 'foo) :foo))
   (assert (= (keyword 'foo-bar) :foo-bar))
   (assert (= (keyword 1) :1))
-  (assert (= (keyword 1.0) :1.0))
   (assert (= (keyword :foo_bar) :foo-bar)))
 
 (defn test-name-conversion []
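
The deleted assertion is a casualty of the keyword change rather than of `keyword` itself: the expected value `:1.0` can no longer be written as a literal. A hedged check at the lexer level:

    from hy.lex import LexException, tokenize

    try:
        tokenize(":1.0")    # the literal from the deleted assertion
    except LexException:
        pass                # `:1` before the dot is symbol_like, so this is rejected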
64 tests/test_lex.py

@@ -5,56 +5,31 @@
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
                        HyString, HyDict, HyList, HySet, HyCons)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
+import pytest
+
+def peoi(): return pytest.raises(PrematureEndOfInput)
+def lexe(): return pytest.raises(LexException)
 
 
 def test_lex_exception():
     """ Ensure tokenize throws a fit on a partial input """
-    try:
-        tokenize("(foo")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("{foo bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(defn foo [bar]")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(foo \"bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
+    with peoi(): tokenize("(foo")
+    with peoi(): tokenize("{foo bar")
+    with peoi(): tokenize("(defn foo [bar]")
+    with peoi(): tokenize("(foo \"bar")
 
 
 def test_unbalanced_exception():
     """Ensure the tokenization fails on unbalanced expressions"""
-    try:
-        tokenize("(bar))")
-        assert True is False
-    except LexException:
-        pass
-
-    try:
-        tokenize("(baz [quux]])")
-        assert True is False
-    except LexException:
-        pass
+    with lexe(): tokenize("(bar))")
+    with lexe(): tokenize("(baz [quux]])")
 
 
 def test_lex_single_quote_err():
     "Ensure tokenizing \"' \" throws a LexException that can be stringified"
     # https://github.com/hylang/hy/issues/1252
-    try:
-        tokenize("' ")
-    except LexException as e:
-        assert "Could not identify the next token" in str(e)
-    else:
-        assert False
+    with lexe() as e: tokenize("' ")
+    assert "Could not identify the next token" in str(e.value)
 
 
 def test_lex_expression_symbols():

@@ -154,6 +129,21 @@ def test_lex_digit_separators():
             [HySymbol(",,,,___,__1__,,__,,2__,q,__")])
 
 
+def test_lex_bad_attrs():
+    with lexe(): tokenize("1.foo")
+    with lexe(): tokenize("0.foo")
+    with lexe(): tokenize("1.5.foo")
+    with lexe(): tokenize("1e3.foo")
+    with lexe(): tokenize("5j.foo")
+    with lexe(): tokenize("3+5j.foo")
+    with lexe(): tokenize("3.1+5.1j.foo")
+    assert tokenize("j.foo")
+    with lexe(): tokenize("3/4.foo")
+    assert tokenize("a/1.foo")
+    assert tokenize("1/a.foo")
+    with lexe(): tokenize(":hello.foo")
+
+
 def test_lex_line_counting():
     """ Make sure we can count lines / columns """
     entry = tokenize("(foo (one two))")[0]
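
One detail of the new `lexe`/`peoi` helpers worth calling out: `pytest.raises` used as a context manager yields an `ExceptionInfo`, so the raised exception itself is `e.value`, which is why the single-quote test now checks `str(e.value)` rather than `str(e)`. A small usage sketch (the test name is hypothetical; the expected message comes from the parser change above):

    import pytest
    from hy.lex import LexException, tokenize

    def lexe(): return pytest.raises(LexException)

    def test_attr_on_literal():
        with lexe() as e:
            tokenize("5.attr")
        assert "Cannot access attribute" in str(e.value)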