# Copyright (c) 2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
2013-03-01 04:37:23 +01:00
|
|
|
from hy.models.expression import HyExpression
|
2013-03-03 01:28:10 +01:00
|
|
|
from hy.models.integer import HyInteger
|
2013-03-01 04:37:23 +01:00
|
|
|
from hy.models.symbol import HySymbol
|
2013-03-03 00:40:00 +01:00
|
|
|
from hy.models.string import HyString
|
2013-03-07 02:59:45 +01:00
|
|
|
from hy.models.dict import HyDict
|
2013-03-01 04:27:20 +01:00
|
|
|
|
2013-03-03 01:48:29 +01:00
|
|
|
from hy.lex.states import LexException
|
|
|
|
|
2013-03-03 01:28:10 +01:00
|
|
|
from hy.lex import tokenize
|
|
|
|
|
2013-03-01 04:27:20 +01:00
|
|
|
|
2013-03-03 01:48:29 +01:00
|
|
|
def test_lex_exception():
    """ Ensure tokenize throws a fit on a partial input """
    # The original pattern put `assert True is False` inside the try body;
    # `assert` statements are stripped under `python -O`, which would make
    # the test silently pass even if tokenize never raised.  Raising from
    # the `else:` clause always fires and keeps the try body minimal.
    try:
        tokenize("(foo")
    except LexException:
        pass
    else:
        raise AssertionError("tokenize did not raise on a partial input")

    try:
        tokenize("&foo&")
    except LexException:
        pass
    else:
        raise AssertionError("tokenize did not raise on an invalid token")
|
|
|
|
|
|
|
|
|
|
|
|
def test_unbalanced_exception():
    """Ensure the tokenization fails on unbalanced expressions"""
    # `assert True is False` in the try body is stripped under `python -O`;
    # an explicit raise in the `else:` clause cannot be optimized away.
    try:
        tokenize("(bar))")
    except LexException:
        pass
    else:
        raise AssertionError("tokenize did not raise on extra close paren")

    try:
        tokenize("(baz [quux]])")
    except LexException:
        pass
    else:
        raise AssertionError("tokenize did not raise on extra close bracket")
|
|
|
|
|
2013-03-03 01:48:29 +01:00
|
|
|
|
2013-03-03 00:40:00 +01:00
|
|
|
def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    result = tokenize("(foo bar)")
    expected = HyExpression([HySymbol("foo"), HySymbol("bar")])
    assert result == [expected]
|
2013-03-03 00:40:00 +01:00
|
|
|
|
2013-03-03 01:41:55 +01:00
|
|
|
|
2013-03-03 00:40:00 +01:00
|
|
|
def test_lex_expression_strings():
    """ Make sure that expressions can produce strings """
    # Docstring previously said "symbols" — a copy-paste from the symbol
    # test above; this test checks that a quoted token lexes to HyString.
    objs = tokenize("(foo \"bar\")")
    assert objs == [HyExpression([HySymbol("foo"), HyString("bar")])]
|
2013-03-03 01:28:10 +01:00
|
|
|
|
2013-03-03 01:41:55 +01:00
|
|
|
|
2013-03-03 01:28:10 +01:00
|
|
|
def test_lex_expression_integer():
    """ Make sure expressions can produce integers """
    result = tokenize("(foo 2)")
    expected = HyExpression([HySymbol("foo"), HyInteger(2)])
    assert result == [expected]
|
2013-03-03 01:41:55 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_lex_line_counting():
    """ Make sure we can count lines / columns """
    outer = tokenize("(foo (one two))")[0]

    # The whole expression spans columns 1-15 of line 1.
    assert outer.start_line == 1
    assert outer.start_column == 1
    assert outer.end_line == 1
    assert outer.end_column == 15

    # The nested (one two) expression sits at columns 6-14.
    inner = outer[1]
    assert inner.start_line == 1
    assert inner.start_column == 6
    assert inner.end_line == 1
    assert inner.end_column == 14
|
2013-03-03 03:08:23 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_lex_line_counting_multi():
    """ Make sure we can do multi-line tokenization """
    entries = tokenize("""
(foo (one two))
(foo bar)
""")

    # First form: line 2 of the input, columns 1-15.
    first = entries[0]
    assert first.start_line == 2
    assert first.start_column == 1
    assert first.end_line == 2
    assert first.end_column == 15

    # Second form: line 3, columns 1-9.
    second = entries[1]
    assert second.start_line == 3
    assert second.start_column == 1
    assert second.end_line == 3
    assert second.end_column == 9
|
2013-03-03 20:03:59 +01:00
|
|
|
|
|
|
|
|
|
|
|
def test_lex_line_counting_multi_inner():
    """ Make sure we can do multi-line tokenization (inner) """
    expr = tokenize("""(foo
    bar)""")[0]

    # `foo` begins right after the opening paren on line 1.
    head = expr[0]
    assert head.start_line == 1
    assert head.start_column == 2

    # `bar` starts on line 2, after four spaces of indent.
    tail = expr[1]
    assert tail.start_line == 2
    assert tail.start_column == 5
|
2013-03-07 02:59:45 +01:00
|
|
|
|
|
|
|
|
2013-03-08 01:23:11 +01:00
|
|
|
def test_dicts():
    """ Ensure that we can tokenize a dict. """
    expected = HyDict({"foo": "bar",
                       "bar": "baz"})

    # A bare dict literal...
    assert tokenize("{foo bar bar baz}") == [expected]

    # ...and a dict nested inside an expression.
    nested = tokenize("(bar {foo bar bar baz})")
    assert nested == [HyExpression([HySymbol("bar"), expected])]
|
|
|
|
|
2013-03-08 01:23:11 +01:00
|
|
|
|
|
|
|
def test_nospace():
    """ Ensure we can tokenize without spaces if we have to """
    outer = tokenize("(foo(one two))")[0]

    # The outer expression spans columns 1-14 of line 1.
    assert outer.start_line == 1
    assert outer.start_column == 1
    assert outer.end_line == 1
    assert outer.end_column == 14

    # The inner (one two) starts immediately after `foo`, column 5.
    inner = outer[1]
    assert inner.start_line == 1
    assert inner.start_column == 5
    assert inner.end_line == 1
    assert inner.end_column == 13
|
2013-04-02 04:07:05 +02:00
|
|
|
|
|
|
|
|
|
|
|
def test_escapes():
    """ Ensure we can escape things """
    # A valid escape sequence lexes through to the real character.
    entry = tokenize("(foo \"foo\\n\")")[0]
    assert entry[1] == "foo\n"

    # An invalid escape (literal backslash-s) must raise.  The original
    # `assert True is False` in the try body is stripped under `python -O`;
    # raising from `else:` always fires when tokenize fails to throw.
    try:
        tokenize("(foo \"foo\s\")")
    except LexException:
        pass
    else:
        raise AssertionError("tokenize did not raise on an invalid escape")
|
|
|
|
|
|
|
|
|
|
|
|
def test_hashbang():
    """ Ensure a hashbang (shebang) line is ignored by the lexer """
    # Docstring previously said "Ensure we can escape things" — a copy-paste
    # from test_escapes; this test verifies that a `#!` line yields no tokens.
    entry = tokenize("#!this is a comment\n")
    assert entry == []
|