Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions lib/productions/default.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import { Base } from "./base";
import { const_data, const_value } from "./helpers";

export class Default extends Base {
  /**
   * Parses an optional default-value clause (`= <value>`) as used by
   * dictionary members and optional arguments.
   * @param {import("../tokeniser").Tokeniser} tokeniser
   * @return {Default|null} null when the stream has no `=` token here
   */
  static parse(tokeniser) {
    const assign = tokeniser.consume("=");
    if (!assign) {
      return null;
    }
    const def = const_value(tokeniser) || tokeniser.consume("string", "null", "[") || tokeniser.error("No value for default");
    const expression = [def];
    if (def.type === "[") {
      // Only the empty sequence `[]` is a valid default; anything between the
      // brackets is an error. (Fix: this previously called a bare `error(...)`,
      // which is not defined in this module — errors are reported via the
      // tokeniser, as on the "No value for default" path above.)
      const close = tokeniser.consume("]") || tokeniser.error("Default sequence value must be empty");
      expression.push(close);
    }
    return new Default({ source: tokeniser.source, tokens: { assign }, expression });
  }

  /**
   * @param {object} args
   * @param {object} args.source shared token array for the whole input
   * @param {object} args.tokens the `=` token under `tokens.assign`
   * @param {object[]} args.expression value token(s): the literal, plus `]` for `[]`
   */
  constructor({ source, tokens, expression }) {
    super({ source, tokens });
    // Non-enumerable so serialization of the production skips the raw tokens.
    Object.defineProperty(this, "expression", { value: expression });
  }

  /** Normalized type name of the default value (e.g. "boolean", "number"). */
  get type() {
    return const_data(this.expression[0]).type;
  }
  /** Parsed value, when the type carries one (undefined for e.g. Infinity). */
  get value() {
    return const_data(this.expression[0]).value;
  }
  /** True for `-Infinity`; undefined for non-Infinity types. */
  get negative() {
    return const_data(this.expression[0]).negative;
  }
}
35 changes: 35 additions & 0 deletions lib/productions/helpers.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
/**
 * Removes the leading underscore that escapes IDL identifiers which would
 * otherwise collide with keywords.
 * @param {string} identifier
 * @return {string} the identifier without its escape prefix
 */
export function unescape(identifier) {
  if (identifier.startsWith('_')) {
    return identifier.slice(1);
  }
  return identifier;
}
Expand Down Expand Up @@ -31,3 +34,35 @@ export function list(tokeniser, { parser, allowDangler, listName = "list" }) {
}
return items;
}

/**
 * Consumes a single constant-value token — a boolean, Infinity, NaN, or a
 * numeric literal — from the stream, if one is next.
 * @param {import("../tokeniser").Tokeniser} tokeniser
 * @return the consumed token, or whatever `consume` returns on no match
 */
export function const_value(tokeniser) {
  const candidates = ["true", "false", "Infinity", "-Infinity", "NaN", "decimal", "integer"];
  return tokeniser.consume(...candidates);
}

/**
 * Maps a raw constant-value token to its normalized data representation:
 * a `{ type, value }` / `{ type, negative }` shape suitable for the AST.
 * @param {object} token
 * @param {string} token.type
 * @param {string} token.value
 */
export function const_data({ type, value }) {
  if (type === "true" || type === "false") {
    return { type: "boolean", value: type === "true" };
  }
  if (type === "Infinity" || type === "-Infinity") {
    return { type: "Infinity", negative: type.startsWith("-") };
  }
  if (type === "[") {
    // `[` stands for the empty-sequence default `[]`.
    return { type: "sequence", value: [] };
  }
  if (type === "decimal" || type === "integer") {
    return { type: "number", value };
  }
  if (type === "string") {
    // Drop the surrounding quote characters kept by the tokeniser.
    return { type: "string", value: value.slice(1, -1) };
  }
  // e.g. "null" and "NaN" carry no value payload.
  return { type };
}
64 changes: 5 additions & 59 deletions lib/webidl2.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@ import { argumentNameKeywords, stringTypes, Tokeniser } from "./tokeniser.js";
import { Base } from "./productions/base.js";
import { Token } from "./productions/token.js";
import { Includes } from "./productions/includes.js";
import { list, unescape } from "./productions/helpers.js";
import { const_data, const_value, list, unescape } from "./productions/helpers.js";
import { Default } from "./productions/default.js";

/**
* @param {Tokeniser} tokeniser
Expand Down Expand Up @@ -62,30 +63,6 @@ function parseByTokens(tokeniser) {
}
}

function const_value() {
return consume("true", "false", "Infinity", "-Infinity", "NaN", DECIMAL, INT);
}

function const_data(token) {
switch (token.type) {
case "true":
case "false":
return { type: "boolean", value: token.type === "true" };
case "Infinity":
case "-Infinity":
return { type: "Infinity", negative: token.type.startsWith("-") };
case DECIMAL:
case INT:
return { type: "number", value: token.value };
case "[":
return { type: "sequence", value: [] };
case STR:
return { type: STR, value: token.value.slice(1, -1) };
default:
return { type: token.type };
}
}

function type_suffix(obj) {
const nullable = consume("?");
if (nullable) {
Expand Down Expand Up @@ -243,7 +220,7 @@ function parseByTokens(tokeniser) {
if (!tokens.name) {
return unconsume(start_position);
}
ret.default = tokens.optional ? Default.parse() : null;
ret.default = tokens.optional ? Default.parse(tokeniser) : null;
return ret;
}

Expand Down Expand Up @@ -357,37 +334,6 @@ function parseByTokens(tokeniser) {
}
}

class Default extends Base {
static parse() {
const assign = consume("=");
if (!assign) {
return null;
}
const def = const_value() || consume(STR, "null", "[") || error("No value for default");
const expression = [def];
if (def.type === "[") {
const close = consume("]") || error("Default sequence value must be empty");
expression.push(close);
}
return new Default({ source, tokens: { assign }, expression });
}

constructor({ source, tokens, expression }) {
super({ source, tokens });
Object.defineProperty(this, "expression", { value: expression });
}

get type() {
return const_data(this.expression[0]).type;
}
get value() {
return const_data(this.expression[0]).value;
}
get negative() {
return const_data(this.expression[0]).negative;
}
}

class Constant extends Base {
static parse() {
const tokens = {};
Expand All @@ -406,7 +352,7 @@ function parseByTokens(tokeniser) {
idlType.type = "const-type";
tokens.name = consume(ID) || error("No name for const");
tokens.assign = consume("=") || error("No value assignment for const");
tokens.value = const_value() || error("No value for const");
tokens.value = const_value(tokeniser) || error("No value for const");
tokens.termination = consume(";") || error("Unterminated const");
const ret = new Constant({ source, tokens });
ret.idlType = idlType;
Expand Down Expand Up @@ -800,7 +746,7 @@ function parseByTokens(tokeniser) {
tokens.required = consume("required");
ret.idlType = type_with_extended_attributes("dictionary-type") || error("No type for dictionary member");
tokens.name = consume(ID) || error("No name for dictionary member");
ret.default = Default.parse();
ret.default = Default.parse(tokeniser);
if (tokens.required && ret.default) error("Required member must not have a default");
tokens.termination = consume(";") || error("Unterminated dictionary member");
return ret;
Expand Down