Use pytest.raises in test_lex
parent e92ef484a0
commit c8736ebd0c
Makefile (2 changed lines)
@@ -46,7 +46,7 @@ tox: venv
 	tox

 flake:
-	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E305
+	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E302,E305,E701

 clear:
 	clear
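The two new ignore codes appear to accommodate the compact helper style introduced in the test diff below: E302 ("expected 2 blank lines") fires on back-to-back top-level defs, and E701 ("multiple statements on one line (colon)") fires on one-line with-statements. A minimal sketch of the kind of code these ignores now permit; the names here are illustrative, not part of the commit:

    import pytest

    def first(): return pytest.raises(ValueError)   # E704 (body on the def line) was already ignored
    def second(): return pytest.raises(KeyError)    # E302: fewer than two blank lines before this def

    def test_compact_style():
        with first(): int("nope")                   # E701: statement after the colon of the with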
@@ -5,56 +5,31 @@
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
                        HyString, HyDict, HyList, HySet, HyCons)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
+import pytest
+
+def peoi(): return pytest.raises(PrematureEndOfInput)
+def lexe(): return pytest.raises(LexException)


 def test_lex_exception():
     """ Ensure tokenize throws a fit on a partial input """
-    try:
-        tokenize("(foo")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("{foo bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(defn foo [bar]")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-    try:
-        tokenize("(foo \"bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
+    with peoi(): tokenize("(foo")
+    with peoi(): tokenize("{foo bar")
+    with peoi(): tokenize("(defn foo [bar]")
+    with peoi(): tokenize("(foo \"bar")


 def test_unbalanced_exception():
     """Ensure the tokenization fails on unbalanced expressions"""
-    try:
-        tokenize("(bar))")
-        assert True is False
-    except LexException:
-        pass
-
-    try:
-        tokenize("(baz [quux]])")
-        assert True is False
-    except LexException:
-        pass
+    with lexe(): tokenize("(bar))")
+    with lexe(): tokenize("(baz [quux]])")


 def test_lex_single_quote_err():
     "Ensure tokenizing \"' \" throws a LexException that can be stringified"
     # https://github.com/hylang/hy/issues/1252
-    try:
-        tokenize("' ")
-    except LexException as e:
-        assert "Could not identify the next token" in str(e)
-    else:
-        assert False
+    with lexe() as e: tokenize("' ")
+    assert "Could not identify the next token" in str(e.value)


 def test_lex_expression_symbols():
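For reference, pytest.raises used as a context manager passes only if the expected exception is raised inside the with-block (otherwise pytest fails the test with a "DID NOT RAISE" message), and it yields an ExceptionInfo object rather than the exception itself, which is why the rewritten assertion checks str(e.value) instead of str(e). A self-contained sketch of the same pattern, using ValueError as a stand-in for the Hy lexer exceptions:

    import pytest

    def verr(): return pytest.raises(ValueError)   # same one-line helper style as lexe()/peoi()

    def test_raises_pattern():
        # The with-block succeeds only when ValueError is raised inside it.
        with verr() as excinfo:
            int("nope")
        # excinfo wraps the exception; the message lives on excinfo.value.
        assert "invalid literal" in str(excinfo.value)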