# hy/tests/lex/test_lex.py
# Copyright (c) 2012 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models.expression import HyExpression
from hy.models.integer import HyInteger
from hy.models.symbol import HySymbol
from hy.models.string import HyString

from hy.lex.states import LexException
from hy.lex import tokenize

def test_lex_exception():
    """ Ensure tokenize raises LexException on malformed input """

    def _assert_lex_fails(source):
        # tokenize must reject this input; a clean return is a test failure.
        # Using an explicit raise (not `assert True == False`) so the check
        # survives `python -O`, and so the failing input appears in the error.
        try:
            tokenize(source)
        except LexException:
            return
        raise AssertionError(
            "tokenize(%r) should have raised LexException" % (source,))

    _assert_lex_fails("(foo")    # unterminated expression
    _assert_lex_fails("&foo&")   # illegal character in a symbol
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    expected = [HyExpression([HySymbol("foo"), HySymbol("bar")])]
    assert tokenize("(foo bar)") == expected
def test_lex_expression_strings():
    """ Test that expressions can produce strings """
    expected = [HyExpression([HySymbol("foo"), HyString("bar")])]
    assert tokenize('(foo "bar")') == expected
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    expected = [HyExpression([HySymbol("foo"), HyInteger(2)])]
    assert tokenize("(foo 2)") == expected
def test_lex_line_counting():
    """ Make sure we can count lines / columns """
    outer = tokenize("(foo (one two))")[0]
    # Outer expression spans the whole single-line input.
    assert (outer.start_line, outer.start_column) == (1, 1)
    assert (outer.end_line, outer.end_column) == (1, 15)
    # The nested `(one two)` starts at the inner open paren.
    inner = outer[1]
    assert (inner.start_line, inner.start_column) == (1, 6)
    assert (inner.end_line, inner.end_column) == (1, 14)
def test_lex_line_counting_multi():
    """ Make sure we can do multi-line tokenization """
    first, second = tokenize("""
(foo (one two))
(foo bar)
""")
    # First form sits on line 2 of the input (line 1 is the leading newline).
    assert (first.start_line, first.start_column) == (2, 1)
    assert (first.end_line, first.end_column) == (2, 15)
    # Second form sits on line 3.
    assert (second.start_line, second.start_column) == (3, 1)
    assert (second.end_line, second.end_column) == (3, 9)