Use pytest.raises in test_lex
This commit is contained in:
parent e92ef484a0
commit c8736ebd0c
Makefile (2 changed lines)
@@ -46,7 +46,7 @@ tox: venv
 	tox
 
 flake:
-	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E305
+	flake8 hy tests --ignore=E121,E123,E126,E226,E24,E704,W503,E302,E305,E701
 
 clear:
 	clear
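A side note, not stated in the commit itself: the extra ignore codes appear to line up with the one-line `def` and `with` style used in the rewritten tests below. A minimal sketch of what pycodestyle would flag without them (the helper names here are invented for illustration):

# E302: "expected 2 blank lines" -- the second one-line def follows the
# first with no blank lines between them (E704, body on the same line as
# the def, was already in the ignore list).
def first_helper(): return 1
def second_helper(): return 2


def use_helpers():
    # E701: "multiple statements on one line (colon)" -- the with block's
    # body sits on the same line as its colon.
    with open("example.txt") as handle: return handle.read()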
tests/test_lex.py
@@ -5,56 +5,31 @@
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
                        HyString, HyDict, HyList, HySet, HyCons)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
 
+import pytest
+
+
+def peoi(): return pytest.raises(PrematureEndOfInput)
+def lexe(): return pytest.raises(LexException)
+
 
 def test_lex_exception():
     """ Ensure tokenize throws a fit on a partial input """
-    try:
-        tokenize("(foo")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("{foo bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("(defn foo [bar]")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
-
-    try:
-        tokenize("(foo \"bar")
-        assert True is False
-    except PrematureEndOfInput:
-        pass
+    with peoi(): tokenize("(foo")
+    with peoi(): tokenize("{foo bar")
+    with peoi(): tokenize("(defn foo [bar]")
+    with peoi(): tokenize("(foo \"bar")
 
 
 def test_unbalanced_exception():
     """Ensure the tokenization fails on unbalanced expressions"""
-    try:
-        tokenize("(bar))")
-        assert True is False
-    except LexException:
-        pass
-
-    try:
-        tokenize("(baz [quux]])")
-        assert True is False
-    except LexException:
-        pass
+    with lexe(): tokenize("(bar))")
+    with lexe(): tokenize("(baz [quux]])")
 
 
 def test_lex_single_quote_err():
     "Ensure tokenizing \"' \" throws a LexException that can be stringified"
     # https://github.com/hylang/hy/issues/1252
-    try:
-        tokenize("' ")
-    except LexException as e:
-        assert "Could not identify the next token" in str(e)
-    else:
-        assert False
+    with lexe() as e: tokenize("' ")
+    assert "Could not identify the next token" in str(e.value)
 
 
 def test_lex_expression_symbols():
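For readers unfamiliar with the pattern: pytest.raises works both as a plain context manager and, via "as", as a way to capture an ExceptionInfo object whose .value attribute is the raised exception, which is why the new assertion reads str(e.value) rather than str(e). A small self-contained sketch, not part of the commit; the parse_digit helper and its error message are invented for illustration:

import pytest


def parse_digit(text):
    # Hypothetical helper: raises ValueError on bad input, analogous to
    # tokenize raising LexException / PrematureEndOfInput on bad Hy source.
    if not text.isdigit():
        raise ValueError("Could not parse %r" % text)
    return int(text)


def test_parse_digit_error():
    # pytest.raises(...) returns a context manager; the block passes only
    # if the named exception is raised inside it.
    with pytest.raises(ValueError):
        parse_digit("x")

    # "as e" binds an ExceptionInfo; the original exception object is
    # reached through e.value, hence the commit's str(e.value).
    with pytest.raises(ValueError) as e:
        parse_digit("x")
    assert "Could not parse" in str(e.value)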