fix: immediate hash in string is considered content instead of comment (#18)
This commit is contained in:
parent
e1aa4dd51b
commit
4993307f61
19
corpus/issues.txt
Normal file
19
corpus/issues.txt
Normal file
|
@@ -0,0 +1,19 @@
|
||||||
|
================================================================================
|
||||||
|
VALID - issue #17 - immediate hash in string is considered content instead of comment
|
||||||
|
================================================================================
|
||||||
|
|
||||||
|
# This is a full-line comment
|
||||||
|
key = "value" # This is a comment at the end of a line
|
||||||
|
another = "# This is not a comment"
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
(document
|
||||||
|
(comment)
|
||||||
|
(pair
|
||||||
|
(bare_key)
|
||||||
|
(string)
|
||||||
|
(comment))
|
||||||
|
(pair
|
||||||
|
(bare_key)
|
||||||
|
(string)))
|
|
@@ -40,7 +40,10 @@ module.exports = grammar({
|
||||||
),
|
),
|
||||||
|
|
||||||
comment: $ =>
|
comment: $ =>
|
||||||
token(seq("#", repeat(getInverseRegex(control_chars.subtract("\t"))))),
|
token(prec(-1, seq(
|
||||||
|
"#",
|
||||||
|
repeat(getInverseRegex(control_chars.subtract("\t"))),
|
||||||
|
))),
|
||||||
|
|
||||||
table: $ =>
|
table: $ =>
|
||||||
seq(
|
seq(
|
||||||
|
|
4
src/grammar.json
generated
4
src/grammar.json
generated
|
@@ -40,6 +40,9 @@
|
||||||
},
|
},
|
||||||
"comment": {
|
"comment": {
|
||||||
"type": "TOKEN",
|
"type": "TOKEN",
|
||||||
|
"content": {
|
||||||
|
"type": "PREC",
|
||||||
|
"value": -1,
|
||||||
"content": {
|
"content": {
|
||||||
"type": "SEQ",
|
"type": "SEQ",
|
||||||
"members": [
|
"members": [
|
||||||
|
@@ -56,6 +59,7 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"table": {
|
"table": {
|
||||||
"type": "SEQ",
|
"type": "SEQ",
|
||||||
|
|
1531
src/parser.c
generated
1531
src/parser.c
generated
File diff suppressed because it is too large
Load diff
Loading…
Reference in a new issue