More embedded tokens #279

Open
wants to merge 10 commits into master
172 changes: 101 additions & 71 deletions lib/parse-js.js
@@ -669,10 +669,12 @@ var ATOMIC_START_TOKEN = array_to_hash([ "atom", "num", "string", "regexp", "nam

 /* -----[ Parser ]----- */
 
-function NodeWithToken(str, start, end) {
+function NodeWithToken(str, start, end, source) {
     this.name = str;
     this.start = start;
     this.end = end;
+    this.source = source;
+    if (!source) throw new Error('no source')
 };
 
 NodeWithToken.prototype.toString = function() { return this.name; };
@@ -761,15 +763,25 @@ function parse($TEXT, exigent_mode, embed_tokens) {
     return ex;
 };
 
-function add_tokens(str, start, end) {
-    return str instanceof NodeWithToken ? str : new NodeWithToken(str, start, end);
+function add_tokens(str, start, end, source) {
+    if (!source) throw new Error('no source')
+    return str instanceof NodeWithToken ? str : new NodeWithToken(str, start, end, source);
 };
 
-function maybe_embed_tokens(parser) {
+function maybe_embed_tokens(parser, options) {
+    options = options || {}
+    if (!options.source) throw new Error('no source')
     if (embed_tokens) return function() {
-        var start = S.token;
+        // start is nully, a node or a token
+        var start = options.start ? (options.start[0] ? options.start[0].start : options.start) : S.token;
         var ast = parser.apply(this, arguments);
-        ast[0] = add_tokens(ast[0], start, prev());
+        /*if (!start && options.start) {
+            throw new Error('invalid start, its a '+(options.start[0]?(options.start[0]+' node without [0].start'):'non-node'))
+        }*/
+        if (ast && start) {
+            var end = options.end || prev()
+            ast[0] = add_tokens(ast[0], start, end, options.source);
+        }
         return ast;
     };
     else return parser;
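
Note on the hunk above: `maybe_embed_tokens` now takes an options object in which `source` names the grammar rule that produced the node, `start` may be either a token or an already-annotated node (whose own `[0].start` is then reused), and `end` can override the default `prev()` token. The following standalone sketch reproduces that wrapping pattern outside the parser; the names `makeEmbedder`, `state` and `fakeRule` are illustrative only and not part of this patch.

// Standalone sketch of the wrapper pattern introduced above (not library code).
function makeEmbedder(embed_tokens, state) {
    function add_tokens(node, start, end, source) {
        if (!source) throw new Error('no source');
        return { node: node, start: start, end: end, source: source };
    }
    return function maybe_embed_tokens(parser, options) {
        options = options || {};
        if (!options.source) throw new Error('no source');
        if (!embed_tokens) return parser;
        return function() {
            // start may be a token, an annotated node, or absent (fall back to the current token)
            var start = options.start
                ? (options.start[0] ? options.start[0].start : options.start)
                : state.token;
            var ast = parser.apply(this, arguments);
            if (ast && start) {
                var end = options.end || state.prev();
                ast[0] = add_tokens(ast[0], start, end, options.source);
            }
            return ast;
        };
    };
}

// Hypothetical usage, mirroring how the patch wraps individual parser rules.
var state = { token: { line: 1, col: 0 }, prev: function() { return { line: 1, col: 5 }; } };
var fakeRule = makeEmbedder(true, state)(function() { return ["num", 42]; }, { source: 'example' });
console.log(fakeRule()); // [ { node: 'num', start: ..., end: ..., source: 'example' }, 42 ]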
@@ -871,7 +883,7 @@ function parse($TEXT, exigent_mode, embed_tokens) {
             unexpected();
         }
     }
-});
+}, {source: 'statement'});
 
 function labeled_statement(label) {
     S.labels.push(label);
@@ -1061,47 +1073,56 @@ function parse($TEXT, exigent_mode, embed_tokens) {
 };
 
 function new_() {
-    var newexp = expr_atom(false), args;
-    if (is("punc", "(")) {
-        next();
-        args = expr_list(")");
-    } else {
-        args = [];
-    }
-    return subscripts(as("new", newexp, args), true);
+    var newNode = maybe_embed_tokens(function() {
+        var newexp = expr_atom(false), args;
+        if (is("punc", "(")) {
+            next();
+            args = expr_list(")");
+        } else {
+            args = [];
+        }
+        return as("new", newexp, args);
+    }, {start: prev(), source: 'new_'})()
+    return subscripts(newNode, true);
 };
 
-var expr_atom = maybe_embed_tokens(function(allow_calls) {
-    if (is("operator", "new")) {
-        next();
-        return new_();
-    }
-    if (is("punc")) {
-        switch (S.token.value) {
-          case "(":
-            next();
-            return subscripts(prog1(expression, curry(expect, ")")), allow_calls);
-          case "[":
-            next();
-            return subscripts(array_(), allow_calls);
-          case "{":
-            next();
-            return subscripts(object_(), allow_calls);
-        }
-        unexpected();
-    }
-    if (is("keyword", "function")) {
-        next();
-        return subscripts(function_(false), allow_calls);
-    }
-    if (HOP(ATOMIC_START_TOKEN, S.token.type)) {
-        var atom = S.token.type == "regexp"
-            ? as("regexp", S.token.value[0], S.token.value[1])
-            : as(S.token.type, S.token.value);
-        return subscripts(prog1(atom, next), allow_calls);
-    }
-    unexpected();
-});
+function expr_atom(allow_calls) {
+    return subscripts(maybe_embed_tokens(function() {
+        if (is("operator", "new")) {
+            return maybe_embed_tokens(function() {
+                next();
+                return new_();
+            }, {source: 'expr_atom_inner'})();
+        }
+        if (is("punc")) {
+            switch (S.token.value) {
+              case "(":
+                next();
+                return prog1(expression, curry(expect, ")"));
+              case "[":
+                next();
+                return array_();
+              case "{":
+                next();
+                return object_();
+            }
+            unexpected();
+        }
+        if (is("keyword", "function")) {
+            next();
+            return subscripts(function_(false), allow_calls);
+        }
+        if (HOP(ATOMIC_START_TOKEN, S.token.type)) {
+            var atom = maybe_embed_tokens(function() {
+                return S.token.type == "regexp"
+                    ? as("regexp", S.token.value[0], S.token.value[1])
+                    : as(S.token.type, S.token.value);
+            }, {source: 'expr_atom_inner_2', start: S.token, end: S.token})()
+            return subscripts(prog1(atom, next), allow_calls);
+        }
+        unexpected();
+    }, {source: 'expr_atom'})(), allow_calls);
+};
 
 function expr_list(closing, allow_trailing_comma, allow_empty) {
     var first = true, a = [];
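
One detail in this hunk that is easy to miss: `new_` is wrapped with `{start: prev(), source: 'new_'}` because its caller has already consumed the `new` keyword via `next()`, so the node's start token has to be taken from the previous token rather than the current one. A reduced sketch of that off-by-one concern, with an illustrative toy token stream:

// Why new_ passes prev() as the start token (toy token stream, names illustrative).
var tokens = [
    { value: 'new', pos: 0 },
    { value: 'Foo', pos: 4 },
    { value: '(',   pos: 8 }
];
var i = 0;
function next() { return tokens[++i]; }   // advance, return the new current token
function prev() { return tokens[i - 1]; } // the token just stepped over

next();                // caller consumes 'new'; current token is now 'Foo'
var startTok = prev(); // 'new' -- the true start of the whole `new Foo(...)` node
console.log(startTok.value, startTok.pos); // -> new 0

The same reasoning explains the explicit `{start: S.token, end: S.token}` on the atomic-token wrapper above: for a one-token atom the annotation is taken before `next()` advances past it.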
@@ -1151,7 +1172,7 @@ function parse($TEXT, exigent_mode, embed_tokens) {
     return as_name();
 };
 
-function as_name() {
+var as_name = maybe_embed_tokens(function() {
     switch (S.token.type) {
       case "name":
       case "operator":
@@ -1161,25 +1182,30 @@ function parse($TEXT, exigent_mode, embed_tokens) {
       default:
         unexpected();
     }
-};
+}, {source: 'as_name'});
 
 function subscripts(expr, allow_calls) {
-    if (is("punc", ".")) {
-        next();
-        return subscripts(as("dot", expr, as_name()), allow_calls);
-    }
-    if (is("punc", "[")) {
-        next();
-        return subscripts(as("sub", expr, prog1(expression, curry(expect, "]"))), allow_calls);
-    }
-    if (allow_calls && is("punc", "(")) {
-        next();
-        return subscripts(as("call", expr, expr_list(")")), true);
-    }
+    var result = maybe_embed_tokens(function() {
+        if (is("punc", ".")) {
+            next();
+            return as("dot", expr, as_name());
+        }
+        if (is("punc", "[")) {
+            next();
+            return as("sub", expr, prog1(expression, curry(expect, "]")));
+        }
+        if (allow_calls && is("punc", "(")) {
+            next();
+            return as("call", expr, expr_list(")"));
+        }
+    }, {start: expr, source: 'subscripts'})()
+    if (result) {
+        return subscripts(result, allow_calls);
+    }
     return expr;
 };
 
-function maybe_unary(allow_calls) {
+var maybe_unary = maybe_embed_tokens(function(allow_calls) {
     if (is("operator") && HOP(UNARY_PREFIX, S.token.value)) {
         return make_unary("unary-prefix",
             prog1(S.token.value, next),
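
The `subscripts` rewrite above also changes the recursion shape: each `.`, `[...]`, or `(...)` level is parsed inside its own wrapped closure (so every level of a chain like `a.b[c](d)` gets its own start/end/source annotation), and recursion only continues when a subscript was actually consumed. Stripped of the parser details, the control flow is roughly the sketch below; `parseOneLevel` stands in for the wrapped closure and is not a real function in this patch.

// Control-flow sketch of the new subscripts() recursion (illustrative only).
function subscripts(expr, allowCalls, parseOneLevel) {
    var result = parseOneLevel(expr, allowCalls); // one annotated level, or undefined
    if (result) {
        return subscripts(result, allowCalls, parseOneLevel); // keep consuming the chain
    }
    return expr; // no further ".", "[" or "(" here
}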
@@ -1191,7 +1217,7 @@ function parse($TEXT, exigent_mode, embed_tokens) {
         next();
     }
     return val;
-};
+}, {source: 'maybe_unary'});
 
 function make_unary(tag, op, expr) {
     if ((op == "++" || op == "--") && !is_assignable(expr))
@@ -1204,9 +1230,11 @@ function parse($TEXT, exigent_mode, embed_tokens) {
     if (op && op == "in" && no_in) op = null;
     var prec = op != null ? PRECEDENCE[op] : null;
     if (prec != null && prec > min_prec) {
-        next();
-        var right = expr_op(maybe_unary(true), prec, no_in);
-        return expr_op(as("binary", op, left, right), min_prec, no_in);
+        return expr_op(maybe_embed_tokens(function() {
+            next();
+            var right = expr_op(maybe_unary(true), prec, no_in);
+            return as("binary", op, left, right);
+        }, {start: left, source: 'expr_op'})(), min_prec, no_in)
     }
     return left;
 };
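
In `expr_op` above (and likewise in `subscripts` and `maybe_assign`), the already-parsed left-hand value is passed as `options.start`, so the wrapper reuses that node's own embedded start token when it has one instead of the scanner's current position. A reduced sketch of just that branch; `pickStart` and the sample values are illustrative, not part of the patch:

// The options.start branch in isolation: an annotated node vs. a bare token.
function pickStart(optionsStart, currentToken) {
    return optionsStart
        ? (optionsStart[0] ? optionsStart[0].start : optionsStart)
        : currentToken;
}

var leftNode  = [ { name: 'num', start: { pos: 3 }, end: { pos: 4 } }, 42 ]; // annotated AST node
var bareToken = { pos: 9 };

console.log(pickStart(leftNode, bareToken).pos); // 3 -- reuses the node's embedded start
console.log(pickStart(bareToken, null).pos);     // 9 -- a token is used as-is
console.log(pickStart(null, bareToken).pos);     // 9 -- falls back to the current token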
@@ -1240,15 +1268,17 @@ function parse($TEXT, exigent_mode, embed_tokens) {
 };
 
 function maybe_assign(no_in) {
-    var left = maybe_conditional(no_in), val = S.token.value;
-    if (is("operator") && HOP(ASSIGNMENT, val)) {
-        if (is_assignable(left)) {
-            next();
-            return as("assign", ASSIGNMENT[val], left, maybe_assign(no_in));
-        }
-        croak("Invalid assignment");
-    }
-    return left;
+    return maybe_embed_tokens(function() {
+        var left = maybe_conditional(no_in), val = S.token.value;
+        if (is("operator") && HOP(ASSIGNMENT, val)) {
+            if (is_assignable(left)) {
+                next();
+                return as("assign", ASSIGNMENT[val], left, maybe_assign(no_in));
+            }
+            croak("Invalid assignment");
+        }
+        return left;
+    }, {start: no_in, source: 'maybe_assign'})();
 };
 
 var expression = maybe_embed_tokens(function(commas, no_in) {
@@ -1260,7 +1290,7 @@ function parse($TEXT, exigent_mode, embed_tokens) {
return as("seq", expr, expression(true, no_in));
}
return expr;
});
}, {source: 'expression'});

function in_loop(cont) {
try {