Add some lexer tests.

This commit is contained in:
Paul Tagliamonte 2012-12-15 17:22:14 -05:00
parent 68cf93e6d0
commit d9b6fe7d79
4 changed files with 20 additions and 10 deletions

10
test.py
View File

@@ -1,10 +0,0 @@
from hy.lex.tokenize import tokenize
print tokenize("""
(+ 2 (+ 1 1) (- 1 1))
""")
print tokenize("""
(print "Hello, \\n World")
""")

0
tests/__init__.py Normal file
View File

0
tests/lexer/__init__.py Normal file
View File

tests/lexer/test_lex.py Normal file
View File
@@ -0,0 +1,20 @@
from hy.lex.tokenize import tokenize
def test_simple_tokenize():
    """A single two-argument call lexes to one token list at top level."""
    result = tokenize("(+ 1 1)")
    assert result == [["+", "1", "1"]]
def test_double_tokenize():
    """Two sibling expressions lex to two top-level token lists."""
    expected = [["+", "1", "2"], ["-", "1", "1"]]
    assert tokenize("(+ 1 2) (- 1 1)") == expected
def test_simple_recurse():
    """A nested call lexes to a nested list inside the top-level list.

    Fix: tokenize() returns a *list of top-level expressions* — the
    sibling tests above expect [["+", "1", "1"]] and a two-element
    outer list — so the expected value here must also carry the outer
    wrapping list, which the original assertion omitted.
    """
    assert tokenize("(+ 1 (+ 1 1))") == [
        ["+", "1", [
            "+", "1", "1",
        ]],
    ]