from hy.lex.tokenize import tokenize


def test_simple_tokenize():
    """A single flat expression should lex to one token list."""
    result = tokenize("(+ 1 1)")
    assert result == [["+", 1, 1]]


def test_double_tokenize():
    """Two sibling expressions should come back as two token lists."""
    expected = [
        ["+", 1, 2],
        ["-", 1, 1],
    ]
    assert tokenize("(+ 1 2) (- 1 1)") == expected


def test_simple_recurse():
    """A nested expression should produce a correspondingly nested list."""
    result = tokenize("(fn one (fn two))")
    assert result == [["fn", "one", ["fn", "two"]]]


def test_mid_recurse():
    """Two nested expressions under one parent should both be captured."""
    expected = [
        ["fn", "one", ["fn", "two"], ["fn", "three"]],
    ]
    assert tokenize("(fn one (fn two)(fn three))") == expected


def test_mid_recurse_comment():
    """Comments (';' to end of line) must be stripped by the lexer."""
    source = """
(fn one ; this is a test
(fn two)(fn three)) ; and so is this
"""
    expected = [
        ["fn", "one", ["fn", "two"], ["fn", "three"]],
    ]
    assert tokenize(source) == expected


def test_full_recurse():
    """A realistic pair of deeply nested expressions lexes correctly."""
    expected = [
        [
            "fn",
            "el",
            [
                "+",
                1,
                2,
                ["==", 1, 20],
                ["-", 1, 1],
            ],
        ],
        ["fn1", "foo", "bar"],
    ]
    result = tokenize("(fn el (+ 1 2 (== 1 20) (- 1 1)))(fn1 foo bar)")
    assert result == expected


def test_string():
    """A lone quoted string lexes to a single token, quotes included."""
    result = tokenize('"a string"')
    assert result == ['"a string"']