feat: initial implementation

Author: Ika
Date:   2019-08-18 18:30:34 +08:00
Commit: 5acac3958f
21 changed files with 8892 additions and 0 deletions

.editorconfig  Normal file  (+9)
@@ -0,0 +1,9 @@
root = true
[*]
charset = utf-8
end_of_line = lf
indent_size = 2
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true

.gitattributes  vendored  Normal file  (+4)
@@ -0,0 +1,4 @@
* text eol=lf
/src/** linguist-vendored
/examples/* linguist-vendored

.gitignore  vendored  Normal file  (+2)
@@ -0,0 +1,2 @@
/build
/node_modules

.npmignore  Normal file  (+0)

.prettierrc  Normal file  (+3)
@@ -0,0 +1,3 @@
{
"trailingComma": "es5"
}

.travis.yml  Normal file  (+16)
@@ -0,0 +1,16 @@
language: node_js
node_js:
  - stable
script:
  - yarn lint
  - yarn test
cache:
  yarn: true
  directories:
    - node_modules
matrix:
  fast_finish: true

LICENSE  Normal file  (+21)
@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Ika <ikatyang@gmail.com> (https://github.com/ikatyang)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md  Normal file  (+39)
@@ -0,0 +1,39 @@
# tree-sitter-toml
[![npm](https://img.shields.io/npm/v/tree-sitter-toml.svg)](https://www.npmjs.com/package/tree-sitter-toml)
[![build](https://img.shields.io/travis/com/ikatyang/tree-sitter-toml/master.svg)](https://travis-ci.com/ikatyang/tree-sitter-toml/builds)
TOML ([spec v0.5.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.5.0.md)) grammar for [tree-sitter](https://github.com/tree-sitter/tree-sitter)
[Changelog](https://github.com/ikatyang/tree-sitter-toml/blob/master/CHANGELOG.md)
## Install
```sh
npm install tree-sitter-toml tree-sitter
```
## Usage
```js
const Parser = require("tree-sitter");
const TOML = require("tree-sitter-toml");
const parser = new Parser();
parser.setLanguage(TOML);
const sourceCode = `
[hello]
world = true
`;
const tree = parser.parse(sourceCode);
console.log(tree.rootNode.toString());
// (root
// (table
// (key) (pair (key) (boolean))))
```
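For a closer look at the parse result, the node objects returned by `tree-sitter` expose properties such as `type`, `text`, and `namedChildren`. A minimal sketch, reusing the `tree` from the snippet above (the `printNode` helper is illustrative, not part of this package):
```js
// Recursively print each named node's type and source text.
function printNode(node, depth = 0) {
  console.log(`${"  ".repeat(depth)}${node.type} ${JSON.stringify(node.text)}`);
  for (const child of node.namedChildren) {
    printNode(child, depth + 1);
  }
}
printNode(tree.rootNode);
// Roughly (indentation reflects nesting):
// root …
//   table …
//     key "hello"
//     pair …
//       key "world"
//       boolean "true"
```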
## License
MIT © [Ika](https://github.com/ikatyang)

binding.gyp  Normal file  (+19)
@@ -0,0 +1,19 @@
{
"targets": [
{
"target_name": "tree_sitter_toml_binding",
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src"
],
"sources": [
"src/parser.c",
"src/scanner.c",
"src/binding.cc"
],
"cflags_c": [
"-std=c99",
]
}
]
}

corpus/spec.txt  Normal file  (+1195)
File diff suppressed because it is too large.

examples/toml-lang.toml  vendored  Normal file  (+33)
@@ -0,0 +1,33 @@
# This is a TOML document.
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00-08:00 # First class dates
[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true
[servers]
# Indentation (tabs and/or spaces) is allowed but not required
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
[clients]
data = [ ["gamma", "delta"], [1, 2] ]
# Line breaks are OK when inside arrays
hosts = [
"alpha",
"omega"
]

grammar.js  Normal file  (+209)
@@ -0,0 +1,209 @@
module.exports = grammar({
name: "toml",
externals: $ => [
$._eof,
$._basic_string_start,
$._basic_string_content,
$._basic_string_end,
$._multiline_basic_string_start,
$._multiline_basic_string_content,
$._multiline_basic_string_end,
$._literal_string_start,
$._literal_string_content,
$._literal_string_end,
$._multiline_literal_string_start,
$._multiline_literal_string_content,
$._multiline_literal_string_end,
],
extras: $ => [$.comment, /[ \t]/],
rules: {
root: $ =>
seq(
repeat($._newline),
choice(
$._eof,
seq(
choice($.pair, $._loose_pair, $._pairs, $.table, $.table_array),
repeat(choice($.table, $.table_array))
)
)
),
comment: $ => /#.*/,
_newline: $ => /(\r?\n)+/,
_newline_or_eof: $ => choice($._newline, $._eof),
...table_like("table", "[", "]"),
...table_like("table_array", "[[", "]]"),
pair: $ => seq($._inline_pair, $._newline_or_eof),
_inline_pair: $ => seq(choice($.dotted_key, $.key), "=", $._inline_value),
_loose_pair: $ => seq(choice($._loose_pair, $.pair), $._newline),
_pairs: $ =>
seq(
choice($.pair, $._loose_pair, $._pairs),
choice($.pair, $._loose_pair)
),
key: $ => choice($._bare_key, $._quoted_key),
dotted_key: $ => seq(choice($.dotted_key, $.key), ".", $.key),
_bare_key: $ => /[A-Za-z0-9_-]+/,
_quoted_key: $ => choice($._basic_string, $._literal_string),
_inline_value: $ =>
choice(
$.string,
$.integer,
$.float,
$.boolean,
$.offset_date_time,
$.local_date_time,
$.local_date,
$.local_time,
$.array,
$.inline_table
),
string: $ =>
choice(
$._basic_string,
$._multiline_basic_string,
$._literal_string,
$._multiline_literal_string
),
_basic_string: $ =>
seq(
$._basic_string_start,
repeat(choice($._basic_string_content, $.escape_sequence)),
$._basic_string_end
),
_multiline_basic_string: $ =>
seq(
$._multiline_basic_string_start,
repeat(
choice(
$._multiline_basic_string_content,
$.escape_sequence,
alias($._escape_line_ending, $.escape_sequence)
)
),
$._multiline_basic_string_end
),
escape_sequence: $ =>
token.immediate(
seq("\\", choice(/[btnfr"\\]/, /u[0-9a-fA-F]{4}/, /U[0-9a-fA-F]{8}/))
),
_escape_line_ending: $ => token.immediate(seq("\\", /\r?\n/)),
_literal_string: $ =>
seq(
$._literal_string_start,
repeat($._literal_string_content),
$._literal_string_end
),
_multiline_literal_string: $ =>
seq(
$._multiline_literal_string_start,
repeat($._multiline_literal_string_content),
$._multiline_literal_string_end
),
integer: $ =>
choice(
$._decimal_integer,
$._hexadecimal_integer,
$._octal_integer,
$._binary_integer
),
_decimal_integer: $ => /[+-]?(0|[1-9](_?[0-9])*)/,
_hexadecimal_integer: $ => /0x[0-9a-fA-F](_?[0-9a-fA-F])*/,
_octal_integer: $ => /0o[0-7](_?[0-7])*/,
_binary_integer: $ => /0b[01](_?[01])*/,
float: $ =>
choice(
seq(
$._decimal_integer,
choice(
seq($._float_fractional_part, optional($._float_exponent_part)),
$._float_exponent_part
)
),
/[+-]?(inf|nan)/
),
_float_fractional_part: $ => /[.][0-9](_?[0-9])*/,
_float_exponent_part: $ => seq(/[eE]/, $._decimal_integer),
boolean: $ => /true|false/,
offset_date_time: $ =>
seq(
$._rfc3339_date,
$._rfc3339_delimiter,
$._rfc3339_time,
$._rfc3339_offset
),
local_date_time: $ =>
seq($._rfc3339_date, $._rfc3339_delimiter, $._rfc3339_time),
local_date: $ => $._rfc3339_date,
local_time: $ => $._rfc3339_time,
_rfc3339_date: $ => /([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])/,
_rfc3339_delimiter: $ => /[ tT]/,
_rfc3339_time: $ =>
/([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)([.][0-9]+)?/,
_rfc3339_offset: $ => /([zZ])|([+-]([01][0-9]|2[0-3]):[0-5][0-9])/,
array: $ =>
seq(
"[",
repeat($._newline),
optional(
seq(
$._inline_value,
repeat($._newline),
repeat(
seq(",", repeat($._newline), $._inline_value, repeat($._newline))
),
optional(seq(",", repeat($._newline)))
)
),
"]"
),
inline_table: $ =>
seq(
"{",
optional(
seq(
alias($._inline_pair, $.pair),
repeat(seq(",", alias($._inline_pair, $.pair)))
)
),
"}"
),
},
});
function table_like(name, header_start, header_end) {
const header_name = `_${name}_header`;
const loose_header_name = `_loose_${name}_header`;
return {
[name]: $ =>
seq(
choice($[header_name], $[loose_header_name]),
optional(choice($.pair, $._loose_pair, $._pairs))
),
[header_name]: $ =>
seq(
header_start,
choice($.dotted_key, $.key),
header_end,
$._newline_or_eof
),
[loose_header_name]: $ =>
seq(choice($[loose_header_name], $[header_name]), $._newline),
};
}
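For readability, here is roughly what the `table_like("table", "[", "]")` spread above evaluates to when written out by hand (an illustrative expansion of the helper, not code from this commit):
```js
// Illustrative expansion of table_like("table", "[", "]"):
({
  table: $ =>
    seq(
      choice($._table_header, $._loose_table_header),
      optional(choice($.pair, $._loose_pair, $._pairs))
    ),
  _table_header: $ =>
    seq("[", choice($.dotted_key, $.key), "]", $._newline_or_eof),
  _loose_table_header: $ =>
    seq(choice($._loose_table_header, $._table_header), $._newline),
});
```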

index.js  Normal file  (+13)
@@ -0,0 +1,13 @@
try {
  module.exports = require("./build/Release/tree_sitter_toml_binding");
} catch (error) {
  try {
    // Fall back to the debug build if no release build is present.
    module.exports = require("./build/Debug/tree_sitter_toml_binding");
  } catch (_) {
    // Neither build exists; surface the original (release) error.
    throw error;
  }
}

try {
  module.exports.nodeTypeInfo = require("./src/node-types.json");
} catch (_) {}

package.json  Normal file  (+38)
@@ -0,0 +1,38 @@
{
"name": "tree-sitter-toml",
"version": "0.1.0",
"description": "TOML grammar for tree-sitter",
"keywords": [
"parser",
"lexer"
],
"main": "index.js",
"repository": "https://github.com/ikatyang/tree-sitter-toml",
"homepage": "https://github.com/ikatyang/tree-sitter-toml#readme",
"author": {
"name": "Ika",
"email": "ikatyang@gmail.com",
"url": "https://github.com/ikatyang"
},
"license": "MIT",
"scripts": {
"lint": "prettier grammar.js --check",
"test": "tree-sitter test && tree-sitter parse examples/*.toml --quiet --time",
"build": "tree-sitter generate && node-gyp build",
"release": "standard-version"
},
"dependencies": {
"nan": "^2.14.0"
},
"devDependencies": {
"prettier": "1.18.2",
"standard-version": "7.0.0",
"tree-sitter-cli": "0.15.7"
},
"files": [
"/src/",
"/binding.gyp",
"/grammar.js",
"/index.js"
]
}

src/binding.cc  vendored  Normal file  (+28)
@@ -0,0 +1,28 @@
#include "tree_sitter/parser.h"
#include <node.h>
#include "nan.h"
using namespace v8;
extern "C" TSLanguage * tree_sitter_toml();
namespace {
NAN_METHOD(New) {}
void Init(Local<Object> exports, Local<Object> module) {
Local<FunctionTemplate> tpl = Nan::New<FunctionTemplate>(New);
tpl->SetClassName(Nan::New("Language").ToLocalChecked());
tpl->InstanceTemplate()->SetInternalFieldCount(1);
Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
Local<Object> instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
Nan::SetInternalFieldPointer(instance, 0, tree_sitter_toml());
Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("toml").ToLocalChecked());
Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
}
NODE_MODULE(tree_sitter_toml_binding, Init)
} // namespace

src/grammar.json  vendored  Normal file  (+1043)
File diff suppressed because it is too large.

src/node-types.json  vendored  Normal file  (+323)
@@ -0,0 +1,323 @@
[
{
"type": "array",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "array",
"named": true
},
{
"type": "boolean",
"named": true
},
{
"type": "float",
"named": true
},
{
"type": "inline_table",
"named": true
},
{
"type": "integer",
"named": true
},
{
"type": "local_date",
"named": true
},
{
"type": "local_date_time",
"named": true
},
{
"type": "local_time",
"named": true
},
{
"type": "offset_date_time",
"named": true
},
{
"type": "string",
"named": true
}
]
}
},
{
"type": "dotted_key",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "dotted_key",
"named": true
},
{
"type": "key",
"named": true
}
]
}
},
{
"type": "float",
"named": true,
"fields": {}
},
{
"type": "inline_table",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "pair",
"named": true
}
]
}
},
{
"type": "integer",
"named": true,
"fields": {}
},
{
"type": "key",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "escape_sequence",
"named": true
}
]
}
},
{
"type": "local_date",
"named": true,
"fields": {}
},
{
"type": "local_date_time",
"named": true,
"fields": {}
},
{
"type": "local_time",
"named": true,
"fields": {}
},
{
"type": "offset_date_time",
"named": true,
"fields": {}
},
{
"type": "pair",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "array",
"named": true
},
{
"type": "boolean",
"named": true
},
{
"type": "dotted_key",
"named": true
},
{
"type": "float",
"named": true
},
{
"type": "inline_table",
"named": true
},
{
"type": "integer",
"named": true
},
{
"type": "key",
"named": true
},
{
"type": "local_date",
"named": true
},
{
"type": "local_date_time",
"named": true
},
{
"type": "local_time",
"named": true
},
{
"type": "offset_date_time",
"named": true
},
{
"type": "string",
"named": true
}
]
}
},
{
"type": "root",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "pair",
"named": true
},
{
"type": "table",
"named": true
},
{
"type": "table_array",
"named": true
}
]
}
},
{
"type": "string",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "escape_sequence",
"named": true
}
]
}
},
{
"type": "table",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "dotted_key",
"named": true
},
{
"type": "key",
"named": true
},
{
"type": "pair",
"named": true
}
]
}
},
{
"type": "table_array",
"named": true,
"fields": {},
"children": {
"multiple": true,
"required": false,
"types": [
{
"type": "dotted_key",
"named": true
},
{
"type": "key",
"named": true
},
{
"type": "pair",
"named": true
}
]
}
},
{
"type": "comment",
"named": true
},
{
"type": "[",
"named": false
},
{
"type": "]",
"named": false
},
{
"type": "[[",
"named": false
},
{
"type": "]]",
"named": false
},
{
"type": "=",
"named": false
},
{
"type": ".",
"named": false
},
{
"type": "escape_sequence",
"named": true
},
{
"type": "boolean",
"named": true
},
{
"type": ",",
"named": false
},
{
"type": "{",
"named": false
},
{
"type": "}",
"named": false
}
]
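Since index.js attaches this file as `nodeTypeInfo`, the metadata can also be read from the package at runtime. A minimal sketch, assuming the package has been built and installed (the lookup itself is illustrative; the property names follow the JSON structure above):
```js
const TOML = require("tree-sitter-toml");

// node-types.json is exposed by index.js as nodeTypeInfo.
const pairInfo = TOML.nodeTypeInfo.find(entry => entry.type === "pair");
console.log(pairInfo.children.types.map(t => t.type));
// => ["array", "boolean", "dotted_key", …, "string"]
```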

src/parser.c  vendored  Normal file  (+4125)
File diff suppressed because it is too large.

src/scanner.c  vendored  Normal file  (+233)
@@ -0,0 +1,233 @@
#include <tree_sitter/parser.h>
#define SINGLE_QUOTE '\''
#define DOUBLE_QUOTE '"'
#define BACKSLASH '\\'
enum TokenType {
EOF,
BASIC_STRING_START,
BASIC_STRING_CONTENT,
BASIC_STRING_END,
MULTILINE_BASIC_STRING_START,
MULTILINE_BASIC_STRING_CONTENT,
MULTILINE_BASIC_STRING_END,
LITERAL_STRING_START,
LITERAL_STRING_CONTENT,
LITERAL_STRING_END,
MULTILINE_LITERAL_STRING_START,
MULTILINE_LITERAL_STRING_CONTENT,
MULTILINE_LITERAL_STRING_END
};
void *tree_sitter_toml_external_scanner_create() { return NULL; }
void tree_sitter_toml_external_scanner_destroy(void *p) {}
void tree_sitter_toml_external_scanner_reset(void *p) {}
unsigned tree_sitter_toml_external_scanner_serialize(void *p, char *buffer) { return 0; }
void tree_sitter_toml_external_scanner_deserialize(void *p, const char *b, unsigned n) {}
#define HANDLE_SINGLELINE_STRING_CONTENT( \
TOKEN_CHAR, \
STRING_CONTENT_TOKEN_TYPE, \
STRING_END_TOKEN_TYPE, \
LOOKAHEAD_WHITELIST_CONDITION, \
LOOKAHEAD_BLACKLIST_CONDITION \
) { \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->advance(lexer, false); \
lexer->mark_end(lexer); \
lexer->result_symbol = STRING_END_TOKEN_TYPE; \
return true; \
} \
for( \
bool has_content = false;; \
has_content = true, lexer->advance(lexer, false) \
) { \
if ( \
(LOOKAHEAD_WHITELIST_CONDITION) \
|| (lexer->lookahead > 0x1f \
&& lexer->lookahead != 0x7f \
&& lexer->lookahead != TOKEN_CHAR \
&& !(LOOKAHEAD_BLACKLIST_CONDITION)) \
) { \
continue; \
} \
if (has_content) { \
lexer->mark_end(lexer); \
lexer->result_symbol = STRING_CONTENT_TOKEN_TYPE; \
return true; \
} \
break; \
} \
}
#define HANDLE_MULTILINE_STRING_CONTENT( \
TOKEN_CHAR, \
STRING_CONTENT_TOKEN_TYPE, \
STRING_END_TOKEN_TYPE, \
LOOKAHEAD_WHITELIST_CONDITION, \
LOOKAHEAD_BLACKLIST_CONDITION \
) { \
bool has_content = false; \
\
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->advance(lexer, false); \
has_content = true; \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->advance(lexer, false); \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->advance(lexer, false); \
lexer->mark_end(lexer); \
lexer->result_symbol = STRING_END_TOKEN_TYPE; \
return true; \
} \
} \
} \
\
for(;; has_content = true, lexer->advance(lexer, false)) { \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->mark_end(lexer); \
lexer->advance(lexer, false); \
has_content = true; \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->advance(lexer, false); \
if (lexer->lookahead == TOKEN_CHAR) { \
lexer->result_symbol = STRING_CONTENT_TOKEN_TYPE; \
return true; \
} \
} \
} \
\
if (lexer->lookahead == '\r') { \
lexer->mark_end(lexer); \
lexer->advance(lexer, false); \
if (lexer->lookahead == '\n') { \
has_content = true; \
} else if (!has_content) { \
return false; \
} else { \
lexer->result_symbol = STRING_CONTENT_TOKEN_TYPE; \
return true; \
} \
} \
\
if ( \
(LOOKAHEAD_WHITELIST_CONDITION) \
|| lexer->lookahead == '\r' \
|| lexer->lookahead == '\n' \
|| (lexer->lookahead > 0x1f \
&& lexer->lookahead != 0x7f \
&& lexer->lookahead != TOKEN_CHAR \
&& !(LOOKAHEAD_BLACKLIST_CONDITION)) \
) { \
continue; \
} \
\
if (has_content) { \
lexer->mark_end(lexer); \
lexer->result_symbol = STRING_CONTENT_TOKEN_TYPE; \
return true; \
} \
\
break; \
} \
}
bool tree_sitter_toml_external_scanner_scan(
void *payload,
TSLexer *lexer,
const bool *valid_symbols
) {
if (lexer->lookahead == 0 && valid_symbols[EOF]) {
lexer->result_symbol = EOF;
lexer->mark_end(lexer);
return true;
}
if (valid_symbols[BASIC_STRING_CONTENT]) {
HANDLE_SINGLELINE_STRING_CONTENT(
DOUBLE_QUOTE,
BASIC_STRING_CONTENT,
BASIC_STRING_END,
false,
lexer->lookahead == BACKSLASH
);
}
if (valid_symbols[MULTILINE_BASIC_STRING_CONTENT]) {
HANDLE_MULTILINE_STRING_CONTENT(
DOUBLE_QUOTE,
MULTILINE_BASIC_STRING_CONTENT,
MULTILINE_BASIC_STRING_END,
false,
lexer->lookahead == BACKSLASH
);
}
if (valid_symbols[LITERAL_STRING_CONTENT]) {
HANDLE_SINGLELINE_STRING_CONTENT(
SINGLE_QUOTE,
LITERAL_STRING_CONTENT,
LITERAL_STRING_END,
lexer->lookahead == '\t',
false
);
}
if (valid_symbols[MULTILINE_LITERAL_STRING_CONTENT]) {
HANDLE_MULTILINE_STRING_CONTENT(
SINGLE_QUOTE,
MULTILINE_LITERAL_STRING_CONTENT,
MULTILINE_LITERAL_STRING_END,
lexer->lookahead == '\t',
false
);
}
if (
valid_symbols[BASIC_STRING_START]
|| valid_symbols[MULTILINE_BASIC_STRING_START]
|| valid_symbols[LITERAL_STRING_START]
|| valid_symbols[MULTILINE_LITERAL_STRING_START]
) {
while (lexer->lookahead == ' ' || lexer->lookahead == '\t') {
lexer->advance(lexer, true);
}
int32_t token_char = 0;
enum TokenType single_token_type;
enum TokenType triple_token_type;
if (lexer->lookahead == SINGLE_QUOTE) {
token_char = SINGLE_QUOTE;
single_token_type = LITERAL_STRING_START;
triple_token_type = MULTILINE_LITERAL_STRING_START;
} else if (lexer->lookahead == DOUBLE_QUOTE) {
token_char = DOUBLE_QUOTE;
single_token_type = BASIC_STRING_START;
triple_token_type = MULTILINE_BASIC_STRING_START;
}
if (token_char != 0) {
lexer->advance(lexer, false);
lexer->mark_end(lexer);
lexer->result_symbol = single_token_type;
if (valid_symbols[triple_token_type]) {
if (lexer->lookahead == token_char) {
lexer->advance(lexer, false);
if (lexer->lookahead == token_char) {
lexer->advance(lexer, false);
lexer->mark_end(lexer);
lexer->result_symbol = triple_token_type;
return true;
}
}
}
if (valid_symbols[single_token_type]) {
return true;
}
}
}
return false;
}

src/tree_sitter/parser.h  vendored  Normal file  (+215)
@@ -0,0 +1,215 @@
#ifndef TREE_SITTER_PARSER_H_
#define TREE_SITTER_PARSER_H_
#ifdef __cplusplus
extern "C" {
#endif
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#define ts_builtin_sym_error ((TSSymbol)-1)
#define ts_builtin_sym_end 0
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024
#ifndef TREE_SITTER_API_H_
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif
typedef struct {
TSFieldId field_id;
uint8_t child_index;
bool inherited;
} TSFieldMapEntry;
typedef struct {
uint16_t index;
uint16_t length;
} TSFieldMapSlice;
typedef uint16_t TSStateId;
typedef struct {
bool visible : 1;
bool named : 1;
} TSSymbolMetadata;
typedef struct TSLexer TSLexer;
struct TSLexer {
int32_t lookahead;
TSSymbol result_symbol;
void (*advance)(TSLexer *, bool);
void (*mark_end)(TSLexer *);
uint32_t (*get_column)(TSLexer *);
bool (*is_at_included_range_start)(TSLexer *);
};
typedef enum {
TSParseActionTypeShift,
TSParseActionTypeReduce,
TSParseActionTypeAccept,
TSParseActionTypeRecover,
} TSParseActionType;
typedef struct {
union {
struct {
TSStateId state;
bool extra : 1;
bool repetition : 1;
};
struct {
TSSymbol symbol;
int16_t dynamic_precedence;
uint8_t child_count;
uint8_t production_id;
};
} params;
TSParseActionType type : 4;
} TSParseAction;
typedef struct {
uint16_t lex_state;
uint16_t external_lex_state;
} TSLexMode;
typedef union {
TSParseAction action;
struct {
uint8_t count;
bool reusable : 1;
};
} TSParseActionEntry;
struct TSLanguage {
uint32_t version;
uint32_t symbol_count;
uint32_t alias_count;
uint32_t token_count;
uint32_t external_token_count;
const char **symbol_names;
const TSSymbolMetadata *symbol_metadata;
const uint16_t *parse_table;
const TSParseActionEntry *parse_actions;
const TSLexMode *lex_modes;
const TSSymbol *alias_sequences;
uint16_t max_alias_sequence_length;
bool (*lex_fn)(TSLexer *, TSStateId);
bool (*keyword_lex_fn)(TSLexer *, TSStateId);
TSSymbol keyword_capture_token;
struct {
const bool *states;
const TSSymbol *symbol_map;
void *(*create)(void);
void (*destroy)(void *);
bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
unsigned (*serialize)(void *, char *);
void (*deserialize)(void *, const char *, unsigned);
} external_scanner;
uint32_t field_count;
const TSFieldMapSlice *field_map_slices;
const TSFieldMapEntry *field_map_entries;
const char **field_names;
};
/*
* Lexer Macros
*/
#define START_LEXER() \
bool result = false; \
bool skip = false; \
int32_t lookahead; \
goto start; \
next_state: \
lexer->advance(lexer, skip); \
start: \
skip = false; \
lookahead = lexer->lookahead;
#define ADVANCE(state_value) \
{ \
state = state_value; \
goto next_state; \
}
#define SKIP(state_value) \
{ \
skip = true; \
state = state_value; \
goto next_state; \
}
#define ACCEPT_TOKEN(symbol_value) \
result = true; \
lexer->result_symbol = symbol_value; \
lexer->mark_end(lexer);
#define END_STATE() return result;
/*
* Parse Table Macros
*/
#define STATE(id) id
#define ACTIONS(id) id
#define SHIFT(state_value) \
{ \
{ \
.type = TSParseActionTypeShift, \
.params = {.state = state_value}, \
} \
}
#define SHIFT_REPEAT(state_value) \
{ \
{ \
.type = TSParseActionTypeShift, \
.params = { \
.state = state_value, \
.repetition = true \
}, \
} \
}
#define RECOVER() \
{ \
{ .type = TSParseActionTypeRecover } \
}
#define SHIFT_EXTRA() \
{ \
{ \
.type = TSParseActionTypeShift, \
.params = {.extra = true} \
} \
}
#define REDUCE(symbol_val, child_count_val, ...) \
{ \
{ \
.type = TSParseActionTypeReduce, \
.params = { \
.symbol = symbol_val, \
.child_count = child_count_val, \
__VA_ARGS__ \
} \
} \
}
#define ACCEPT_INPUT() \
{ \
{ .type = TSParseActionTypeAccept } \
}
#ifdef __cplusplus
}
#endif
#endif // TREE_SITTER_PARSER_H_

yarn.lock  Normal file  (+1324)
File diff suppressed because it is too large.