Merge pull request #1354 from gilch/comment-discard
add #_ discard syntax
commit dea99c8b76
NEWS
@@ -14,6 +14,8 @@ Changes from 0.13.0
  ``(eval `(+ 1 ~(HyInteger n)))``
* Literal `Inf`s and `NaN`s must now be capitalized like that
* `get` is available as a function
* New `comment` macro
* Support EDN `#_` syntax to discard the next term

[ Bug Fixes ]
* Numeric literals are no longer parsed as symbols when followed by a dot
@@ -89,6 +89,31 @@ the error ``Keyword argument :foo needs a value``. To avoid this, you can quote
the keyword, as in ``(f ':foo)``, or use it as the value of another keyword
argument, as in ``(f :arg :foo)``.

discard prefix
--------------

Hy supports the Extensible Data Notation discard prefix, like Clojure.
Any form prefixed with ``#_`` is discarded instead of compiled.
The discarded form is removed entirely, so it doesn't evaluate to anything,
not even ``None``.
It's often more useful than linewise comments for commenting out a
form, because it respects code structure even when part of another
form is on the same line. For example:

.. code-block:: clj

    => (print "Hy" "cruel" "World!")
    Hy cruel World!
    => (print "Hy" #_"cruel" "World!")
    Hy World!
    => (+ 1 1 (print "Math is hard!"))
    Math is hard!
    Traceback (most recent call last):
      ...
    TypeError: unsupported operand type(s) for +: 'int' and 'NoneType'
    => (+ 1 1 #_(print "Math is hard!"))
    2
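Because the discard happens in the reader, nothing at all reaches the compiler. A rough way to check this from Python is to go through ``hy.lex.tokenize`` and the model classes that the tests further down in this diff exercise; the snippet below is a sketch of that check, not part of the documented API surface.

.. code-block:: python

    # Sketch: #_ removes the next form at read time, before compilation.
    # Uses hy.lex.tokenize and the Hy model classes as in the tests below.
    from hy.lex import tokenize
    from hy.models import HyExpression, HyString, HySymbol

    # (print "Hy" #_"cruel" "World!") reads as if "cruel" were never written.
    assert tokenize('(print "Hy" #_"cruel" "World!")') == [
        HyExpression([HySymbol("print"), HyString("Hy"), HyString("World!")])
    ]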

Built-Ins
=========
@@ -328,6 +353,30 @@ as the user enters *k*.
       (print "Try again")))


comment
-------

The ``comment`` macro ignores its body and always expands to ``None``.
Unlike linewise comments, the body of the ``comment`` macro must
be grammatically valid Hy, so the compiler can tell where the comment ends.
Besides semicolon linewise comments,
Hy also has the ``#_`` discard prefix syntax to discard the next form.
A discarded form is removed completely and doesn't expand to anything, not even ``None``.

.. code-block:: clj

    => (print (comment <h1>Surprise!</h1>
    ...                <p>You'd be surprised what's grammatically valid in Hy.</p>
    ...                <p>(Keep delimiters in balance, and you're mostly good to go.)</p>)
    ...        "Hy")
    None Hy
    => (print #_(comment <h1>Surprise!</h1>
    ...                  <p>You'd be surprised what's grammatically valid in Hy.</p>
    ...                  <p>(Keep delimiters in balance, and you're mostly good to go.)</p>)
    ...        "Hy")
    Hy
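The practical difference is where the two constructs disappear: ``(comment ...)`` survives the reader and is only expanded away at compile time, while ``#_`` never produces a form at all. The sketch below shows that distinction from Python through ``hy.lex.tokenize``, mirroring the style of the test cases added later in this diff.

.. code-block:: python

    # Sketch: (comment ...) is still a form after reading; #_ leaves nothing.
    from hy.lex import tokenize
    from hy.models import HyExpression, HySymbol

    # The comment form reaches the compiler, which expands it to None.
    assert tokenize("(comment secret)") == [
        HyExpression([HySymbol("comment"), HySymbol("secret")])
    ]

    # The discarded form never gets past the reader.
    assert tokenize("#_(comment secret)") == []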

cond
----
@@ -214,3 +214,7 @@
  (setv decorators (cut expr None -1)
        fndef (get expr -1))
  `(with-decorator ~@decorators ~fndef))

(defmacro comment [&rest body]
  "Ignores body and always expands to None"
  None)
@@ -25,6 +25,7 @@ lg.add('QUOTE', r'\'%s' % end_quote)
lg.add('QUASIQUOTE', r'`%s' % end_quote)
lg.add('UNQUOTESPLICE', r'~@%s' % end_quote)
lg.add('UNQUOTE', r'~%s' % end_quote)
lg.add('DISCARD', r'#_')
lg.add('HASHSTARS', r'#\*+')
lg.add('HASHOTHER', r'#%s' % identifier)
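Rule order matters here: rply's lexer tries rules in the order they were added and takes the first match, so ``DISCARD`` has to be registered before the catch-all ``HASHOTHER`` rule, or ``#_`` would lex as an ordinary ``#...`` token. The following is a self-contained toy lexer illustrating that behavior; it is not Hy's actual lexer module, and the identifier pattern is simplified.

.. code-block:: python

    # Toy rply lexer showing why DISCARD must precede the broader '#' rule.
    from rply import LexerGenerator

    lg = LexerGenerator()
    lg.add('DISCARD', r'#_')                      # tried first
    lg.add('HASHOTHER', r'#[^()\[\]{}\'"\s;]+')   # simplified '#...' rule
    lg.add('IDENTIFIER', r'[^()\[\]{}\'"\s;]+')
    lg.ignore(r'\s+')

    lexer = lg.build()
    print([(tok.name, tok.value) for tok in lexer.lex('#_ 1 #foo')])
    # -> [('DISCARD', '#_'), ('IDENTIFIER', '1'), ('HASHOTHER', '#foo')]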
@@ -153,6 +153,17 @@ def list_contents_single(p):
    return [p[0]]


@pg.production("list_contents : DISCARD term discarded_list_contents")
def list_contents_empty(p):
    return []


@pg.production("discarded_list_contents : DISCARD term discarded_list_contents")
@pg.production("discarded_list_contents :")
def discarded_list_contents(p):
    pass


@pg.production("term : identifier")
@pg.production("term : paren")
@pg.production("term : dict")
@@ -163,6 +174,11 @@ def term(p):
    return p[0]


@pg.production("term : DISCARD term term")
def term_discard(p):
    return p[2]


@pg.production("term : QUOTE term")
@set_quote_boundaries
def term_quote(p):
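The two new production groups cover the two places a discard can appear: ``term : DISCARD term term`` makes a discard consume the following term and stand in for the term after it (which is why discards nest), while the ``list_contents``/``discarded_list_contents`` productions absorb trailing discards that have no following term. Both behaviors can be observed from the outside through ``tokenize``; the assertions below are lifted from the test cases added further down.

.. code-block:: python

    # The two grammar paths, seen through hy.lex.tokenize.
    from hy.lex import tokenize
    from hy.models import HyInteger, HyList

    # term : DISCARD term term -- discards nest: the outer #_ discards
    # whatever the inner discard leaves behind.
    assert tokenize("#_ #_0 1 2") == [HyInteger(2)]

    # list_contents : DISCARD term discarded_list_contents -- trailing
    # discards at the end of a sequence yield no term at all.
    assert tokenize("[0 #_1 #_2]") == [HyList([HyInteger(0)])]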
@@ -678,3 +678,7 @@ result['y in globals'] = 'y' in globals()")
                [1 6 21])
  (assert-equal ((juxt identity) 42)
                [42]))

(defn test-comment []
  (assert-none (comment <h1>This is merely a comment.</h1>
                        <p> Move along. (Nothing to see here.)</p>)))
@@ -4,7 +4,7 @@
 from math import isnan
 from hy.models import (HyExpression, HyInteger, HyFloat, HyComplex, HySymbol,
-                       HyString, HyDict, HyList, HySet, HyCons)
+                       HyString, HyDict, HyList, HySet, HyCons, HyKeyword)
 from hy.lex import LexException, PrematureEndOfInput, tokenize
 import pytest
@@ -423,3 +423,72 @@ def test_cons_list():
    entry = tokenize("(a b . {})")[0]
    assert entry == HyDict([HySymbol("a"), HySymbol("b")])
    assert type(entry) == HyDict


def test_discard():
    """Check that discarded terms are removed properly."""
    # empty
    assert tokenize("") == []
    # single
    assert tokenize("#_1") == []
    # multiple
    assert tokenize("#_1 #_2") == []
    assert tokenize("#_1 #_2 #_3") == []
    # nested discard
    assert tokenize("#_ #_1 2") == []
    assert tokenize("#_ #_ #_1 2 3") == []
    # trailing
    assert tokenize("0") == [0]
    assert tokenize("0 #_1") == [0]
    assert tokenize("0 #_1 #_2") == [0]
    # leading
    assert tokenize("2") == [2]
    assert tokenize("#_1 2") == [2]
    assert tokenize("#_0 #_1 2") == [2]
    assert tokenize("#_ #_0 1 2") == [2]
    # both
    assert tokenize("#_1 2 #_3") == [2]
    assert tokenize("#_0 #_1 2 #_ #_3 4") == [2]
    # inside
    assert tokenize("0 #_1 2") == [0, 2]
    assert tokenize("0 #_1 #_2 3") == [0, 3]
    assert tokenize("0 #_ #_1 2 3") == [0, 3]
    # in HyList
    assert tokenize("[]") == [HyList([])]
    assert tokenize("[#_1]") == [HyList([])]
    assert tokenize("[#_1 #_2]") == [HyList([])]
    assert tokenize("[#_ #_1 2]") == [HyList([])]
    assert tokenize("[0]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1]") == [HyList([HyInteger(0)])]
    assert tokenize("[0 #_1 #_2]") == [HyList([HyInteger(0)])]
    assert tokenize("[2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_0 #_1 2]") == [HyList([HyInteger(2)])]
    assert tokenize("[#_ #_0 1 2]") == [HyList([HyInteger(2)])]
    # in HySet
    assert tokenize("#{}") == [HySet()]
    assert tokenize("#{#_1}") == [HySet()]
    assert tokenize("#{0 #_1}") == [HySet([HyInteger(0)])]
    assert tokenize("#{#_1 0}") == [HySet([HyInteger(0)])]
    # in HyDict
    assert tokenize("{}") == [HyDict()]
    assert tokenize("{#_1}") == [HyDict()]
    assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 #_0 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
    assert tokenize("{1 2 #_0}") == [HyDict([HyInteger(1), HyInteger(2)])]
    # in HyExpression
    assert tokenize("()") == [HyExpression()]
    assert tokenize("(#_foo)") == [HyExpression()]
    assert tokenize("(#_foo bar)") == [HyExpression([HySymbol("bar")])]
    assert tokenize("(foo #_bar)") == [HyExpression([HySymbol("foo")])]
    assert tokenize("(foo :bar 1)") == [HyExpression([HySymbol("foo"), HyKeyword(":bar"), HyInteger(1)])]
    assert tokenize("(foo #_:bar 1)") == [HyExpression([HySymbol("foo"), HyInteger(1)])]
    assert tokenize("(foo :bar #_1)") == [HyExpression([HySymbol("foo"), HyKeyword(":bar")])]
    # discard term with nesting
    assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
        HyList([HyInteger(1), HyInteger(2), HyInteger(3), HyInteger(4)])
    ]
    # discard with other prefix syntax
    assert tokenize("a #_'b c") == [HySymbol("a"), HySymbol("c")]
    assert tokenize("a '#_b c") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("c")])]
    assert tokenize("a '#_b #_c d") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("d")])]
    assert tokenize("a '#_ #_b c d") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("d")])]