Initial commit

Developer
2025-04-21 16:03:20 +02:00
commit 2832896157
22874 changed files with 3092801 additions and 0 deletions


@@ -0,0 +1,40 @@
var TYPE = require('../../tokenizer').TYPE;

var RIGHTPARENTHESIS = TYPE.RightParenthesis;

// <function-token> <sequence> )
module.exports = {
    name: 'Function',
    structure: {
        name: String,
        children: [[]]
    },
    parse: function(readSequence, recognizer) {
        var start = this.scanner.tokenStart;
        var name = this.consumeFunctionName();
        var nameLowerCase = name.toLowerCase();
        var children;

        children = recognizer.hasOwnProperty(nameLowerCase)
            ? recognizer[nameLowerCase].call(this, recognizer)
            : readSequence.call(this, recognizer);

        if (!this.scanner.eof) {
            this.eat(RIGHTPARENTHESIS);
        }

        return {
            type: 'Function',
            loc: this.getLocation(start, this.scanner.tokenStart),
            name: name,
            children: children
        };
    },
    generate: function(node) {
        this.chunk(node.name);
        this.chunk('(');
        this.children(node);
        this.chunk(')');
    },
    walkContext: 'function'
};
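
The file above appears to be the Function node definition from the css-tree CSS parser: parse() reads the function name, dispatches to a name-specific handler from the recognizer when one is registered for the lowercased name (falling back to the generic readSequence otherwise), and eats the closing parenthesis; generate() re-emits the name, '(', the children, and ')'. A minimal usage sketch follows, assuming a css-tree-compatible package is installed; the require name and the sample value are illustrative assumptions, not taken from the diff.

// Minimal sketch, assuming the standard css-tree public API (parse/walk/generate).
var csstree = require('css-tree');

// Parse a single declaration value; rgb(...) is produced as a Function node
// by the parse() method defined in the file above.
var ast = csstree.parse('rgb(255, 0, 0)', { context: 'value' });

csstree.walk(ast, function(node) {
    if (node.type === 'Function') {
        // node.name holds the function name, node.children its argument sequence
        console.log(node.name); // -> 'rgb'
    }
});

// Serialization goes through the generate() method above: name, '(', children, ')'
console.log(csstree.generate(ast)); // prints the re-serialized value (whitespace may be normalized)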