From 203965d0fa55cedbb9af6249c7c2aee4d54ed983 Mon Sep 17 00:00:00 2001
From: gilch
Date: Wed, 2 Aug 2017 15:53:46 -0600
Subject: [PATCH 1/4] add #_ discard syntax

---
 hy/lex/lexer.py  |  1 +
 hy/lex/parser.py | 16 ++++++++++++++++
 2 files changed, 17 insertions(+)

diff --git a/hy/lex/lexer.py b/hy/lex/lexer.py
index b73c284..cb021a0 100755
--- a/hy/lex/lexer.py
+++ b/hy/lex/lexer.py
@@ -25,6 +25,7 @@ lg.add('QUOTE', r'\'%s' % end_quote)
 lg.add('QUASIQUOTE', r'`%s' % end_quote)
 lg.add('UNQUOTESPLICE', r'~@%s' % end_quote)
 lg.add('UNQUOTE', r'~%s' % end_quote)
+lg.add('DISCARD', r'#_')
 lg.add('HASHSTARS', r'#\*+')
 lg.add('HASHOTHER', r'#%s' % identifier)
 
diff --git a/hy/lex/parser.py b/hy/lex/parser.py
index 0c73652..efac562 100755
--- a/hy/lex/parser.py
+++ b/hy/lex/parser.py
@@ -153,6 +153,17 @@ def list_contents_single(p):
     return [p[0]]
 
 
+@pg.production("list_contents : DISCARD term discarded_list_contents")
+def list_contents_empty(p):
+    return []
+
+
+@pg.production("discarded_list_contents : DISCARD term discarded_list_contents")
+@pg.production("discarded_list_contents :")
+def discarded_list_contents(p):
+    pass
+
+
 @pg.production("term : identifier")
 @pg.production("term : paren")
 @pg.production("term : dict")
@@ -163,6 +174,11 @@ def term(p):
     return p[0]
 
 
+@pg.production("term : DISCARD term term")
+def term_discard(p):
+    return p[2]
+
+
 @pg.production("term : QUOTE term")
 @set_quote_boundaries
 def term_quote(p):

From d0530b0e7e6866cdc0075a3e506b875a47aa5d53 Mon Sep 17 00:00:00 2001
From: gilch
Date: Thu, 3 Aug 2017 19:04:30 -0600
Subject: [PATCH 2/4] add comment macro

---
 hy/core/macros.hy | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/hy/core/macros.hy b/hy/core/macros.hy
index 82cf3ae..c533b02 100644
--- a/hy/core/macros.hy
+++ b/hy/core/macros.hy
@@ -216,3 +216,7 @@
   (setv decorators (cut expr None -1)
         fndef (get expr -1))
   `(with-decorator ~@decorators ~fndef))
+
+(defmacro comment [&rest body]
+  "Ignores body and always expands to None"
+  None)
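Taken together, the two patches split the work between the reader and the macro layer: `#_` is handled entirely by the lexer and parser, while `(comment ...)` is ordinary data to the reader and only collapses to `None` during macro expansion. A minimal sketch of the reader-level difference, assuming both patches above are applied (`side-effect` and `noise` are just placeholder symbols):

    from hy.lex import tokenize
    from hy.models import HyExpression, HySymbol

    # With the DISCARD token and productions in place, the form after #_
    # never reaches the compiler: only `print` and the quoted symbol
    # survive in the parse result.
    assert tokenize("(print #_(side-effect) 'ok)") == [
        HyExpression([HySymbol("print"),
                      HyExpression([HySymbol("quote"), HySymbol("ok")])])
    ]

    # `(comment noise)` is still present at read time; only the comment
    # macro from the second patch later expands it away to None.
    assert tokenize("(print (comment noise) 'ok)") == [
        HyExpression([HySymbol("print"),
                      HyExpression([HySymbol("comment"), HySymbol("noise")]),
                      HyExpression([HySymbol("quote"), HySymbol("ok")])])
    ]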
tokenize("a '#_ #_b c d") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("d")])] From 4dd57c84eed2c9b902a64ae0c91733290d893684 Mon Sep 17 00:00:00 2001 From: gilch Date: Fri, 4 Aug 2017 15:59:34 -0600 Subject: [PATCH 4/4] NEWS and docs for (comment ...) and #_ --- NEWS | 2 ++ docs/language/api.rst | 49 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+) diff --git a/NEWS b/NEWS index 2c52152..e4d92ed 100644 --- a/NEWS +++ b/NEWS @@ -13,6 +13,8 @@ Changes from 0.13.0 ``(eval `(+ 1 ~(HyInteger n)))`` * Literal `Inf`s and `NaN`s must now be capitalized like that * `get` is available as a function + * new `comment` macro + * support EDN `#_` syntax to discard the next term [ Bug Fixes ] * Numeric literals are no longer parsed as symbols when followed by a dot diff --git a/docs/language/api.rst b/docs/language/api.rst index 3904e86..1f223f0 100644 --- a/docs/language/api.rst +++ b/docs/language/api.rst @@ -89,6 +89,31 @@ the error ``Keyword argument :foo needs a value``. To avoid this, you can quote the keyword, as in ``(f ':foo)``, or use it as the value of another keyword argument, as in ``(f :arg :foo)``. +discard prefix +-------------- + +Hy supports the Extensible Data Notation discard prefix, like Clojure. +Any form prefixed with ``#_`` is discarded instead of compiled. +This completely removes the form so it doesn't evaluate to anything, +not even None. +It's often more useful than linewise comments for commenting out a +form, because it respects code structure even when part of another +form is on the same line. For example: + +.. code-block:: clj + + => (print "Hy" "cruel" "World!") + Hy cruel World! + => (print "Hy" #_"cruel" "World!") + Hy World! + => (+ 1 1 (print "Math is hard!")) + Math is hard! + Traceback (most recent call last): + ... + TypeError: unsupported operand type(s) for +: 'int' and 'NoneType' + => (+ 1 1 #_(print "Math is hard!")) + 2 + Built-Ins ========= @@ -328,6 +353,30 @@ as the user enters *k*. (print "Try again"))) +comment +---- + +The ``comment`` macro ignores its body and always expands to ``None``. +Unlike linewise comments, the body of the ``comment`` macro must +be grammatically valid Hy, so the compiler can tell where the comment ends. +Besides the semicolon linewise comments, +Hy also has the ``#_`` discard prefix syntax to discard the next form. +This is completely discarded and doesn't expand to anything, not even ``None``. + +.. code-block:: clj + + => (print (comment

From 4dd57c84eed2c9b902a64ae0c91733290d893684 Mon Sep 17 00:00:00 2001
From: gilch
Date: Fri, 4 Aug 2017 15:59:34 -0600
Subject: [PATCH 4/4] NEWS and docs for (comment ...) and #_

---
 NEWS                  |  2 ++
 docs/language/api.rst | 49 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 51 insertions(+)

diff --git a/NEWS b/NEWS
index 2c52152..e4d92ed 100644
--- a/NEWS
+++ b/NEWS
@@ -13,6 +13,8 @@ Changes from 0.13.0
    ``(eval `(+ 1 ~(HyInteger n)))``
  * Literal `Inf`s and `NaN`s must now be capitalized like that
  * `get` is available as a function
+ * New `comment` macro
+ * Support EDN `#_` syntax to discard the next term
 
 [ Bug Fixes ]
  * Numeric literals are no longer parsed as symbols when followed by a dot

diff --git a/docs/language/api.rst b/docs/language/api.rst
index 3904e86..1f223f0 100644
--- a/docs/language/api.rst
+++ b/docs/language/api.rst
@@ -89,6 +89,31 @@ the error ``Keyword argument :foo needs a value``. To avoid this, you can
 quote the keyword, as in ``(f ':foo)``, or use it as the value of another
 keyword argument, as in ``(f :arg :foo)``.
 
+discard prefix
+--------------
+
+Hy supports the Extensible Data Notation (EDN) discard prefix, like Clojure.
+Any form prefixed with ``#_`` is discarded instead of compiled.
+This completely removes the form, so it doesn't evaluate to anything,
+not even ``None``.
+It's often more useful than linewise comments for commenting out a
+form, because it respects code structure even when part of another
+form is on the same line. For example:
+
+.. code-block:: clj
+
+   => (print "Hy" "cruel" "World!")
+   Hy cruel World!
+   => (print "Hy" #_"cruel" "World!")
+   Hy World!
+   => (+ 1 1 (print "Math is hard!"))
+   Math is hard!
+   Traceback (most recent call last):
+     ...
+   TypeError: unsupported operand type(s) for +: 'int' and 'NoneType'
+   => (+ 1 1 #_(print "Math is hard!"))
+   2
+
 Built-Ins
 =========
 
@@ -328,6 +353,30 @@ as the user enters *k*.
        (print "Try again")))
 
 
+comment
+-------
+
+The ``comment`` macro ignores its body and always expands to ``None``.
+Unlike linewise comments, the body of ``comment`` must be grammatically
+valid Hy, so the compiler can tell where the comment ends.
+Besides linewise comments with ``;``, Hy also has the ``#_`` discard
+prefix syntax, which drops the next form at read time; a discarded form
+doesn't expand to anything at all, not even ``None``.
+
+.. code-block:: clj
+
+   => (print (comment <h1>Surprise!</h1>
+   ...                <p>You'd be surprised what's grammatically valid in Hy.</p>
+   ...                <p>(Keep delimiters in balance, and you're mostly good to go.)</p>)
+   ...        "Hy")
+   None Hy
+   => (print #_(comment <h1>Surprise!</h1>
+   ...                  <p>You'd be surprised what's grammatically valid in Hy.</p>
+   ...                  <p>(Keep delimiters in balance, and you're mostly good to go.)</p>)
+   ...        "Hy")
+   Hy
+
+
 cond
 ----
 
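One practical nuance with ``#_`` inside dictionary literals, which the HyDict cases in test_discard above pin down: the prefix discards exactly one form, so discarding only a key re-pairs the remaining items instead of dropping an entry. To remove a whole key-value pair, discard both forms. A small sketch, assuming the patches above are applied (`zero` is just a placeholder symbol):

    from hy.lex import tokenize
    from hy.models import HyDict, HyInteger

    # Discarding only the key leaves {1 2}: the remaining forms re-pair,
    # so this is a single entry mapping 1 to 2, not a dict still keyed by 0.
    assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]

    # Discard the key and the value to drop the whole entry.
    assert tokenize("{#_0 #_zero 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]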