From 0786b59521a80051e4232534855a2a22e89f5f79 Mon Sep 17 00:00:00 2001
From: Kagami Sascha Rosylight
Date: Sat, 13 Jul 2019 12:45:19 +0900
Subject: [PATCH 1/3] fix: require dictionary default value only when optional

---
 lib/productions/operation.js                     | 2 +-
 test/invalid/baseline/operation-dict-default.txt | 6 +++---
 test/invalid/idl/operation-dict-default.webidl   | 5 +++++
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/lib/productions/operation.js b/lib/productions/operation.js
index d7c3e6c6..42e18d34 100644
--- a/lib/productions/operation.js
+++ b/lib/productions/operation.js
@@ -49,7 +49,7 @@ export class Operation extends Base {
   *validate(defs) {
     for (const argument of this.arguments) {
       if (idlTypeIncludesDictionary(argument.idlType, defs)) {
-        if (!argument.default) {
+        if (argument.optional && !argument.default) {
           const message = `Optional dictionary arguments must have a default value of \`{}\`.`;
           yield validationError(this.source, argument.tokens.name, this, message);
         }
diff --git a/test/invalid/baseline/operation-dict-default.txt b/test/invalid/baseline/operation-dict-default.txt
index 2241d4b4..6eb5bc8f 100644
--- a/test/invalid/baseline/operation-dict-default.txt
+++ b/test/invalid/baseline/operation-dict-default.txt
@@ -1,9 +1,9 @@
-Validation error at line 9 in operation-dict-default.webidl, inside `operation x`:
+Validation error at line 13 in operation-dict-default.webidl, inside `operation x`:
   void x(optional Dict dict);
                        ^
 Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 11 in operation-dict-default.webidl, inside `operation y`:
+Validation error at line 15 in operation-dict-default.webidl, inside `operation y`:
   (boolean or Dict) union);
                     ^
 Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 13 in operation-dict-default.webidl, inside `operation z`:
+Validation error at line 17 in operation-dict-default.webidl, inside `operation z`:
   void z(optional Union union);
                         ^
 Optional dictionary arguments must have a default value of `{}`.
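The IDL fixture change below adds a non-optional dictionary argument (`void r(Required req);`) that must now validate cleanly. As a minimal sketch of the new behavior through the library's exported `parse` and `validate` functions (the `Dict` and `X` names here are illustrative, not taken from this patch):

```JS
// Sketch (not part of the patch): with this fix, only *optional* dictionary
// arguments must default to {}; required ones no longer yield an error.
const { parse, validate } = require("webidl2");

const ast = parse(`
  dictionary Dict {};

  [Exposed=Window]
  interface X {
    void a(Dict dict);          // OK after this patch: argument is not optional
    void b(optional Dict dict); // still an error: needs = {}
  };
`);

for (const error of validate(ast)) {
  console.log(error.message);
  // "Optional dictionary arguments must have a default value of `{}`."
  // (reported once, for b's argument only)
}
```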
diff --git a/test/invalid/idl/operation-dict-default.webidl b/test/invalid/idl/operation-dict-default.webidl
index 6640100b..c75a8f15 100644
--- a/test/invalid/idl/operation-dict-default.webidl
+++ b/test/invalid/idl/operation-dict-default.webidl
@@ -2,6 +2,10 @@ dictionary Dict {
   short x = 0;
 };
 
+dictionary Required {
+  required short x;
+};
+
 typedef (short or Dict) Union;
 
 [Exposed=Window]
@@ -12,4 +16,5 @@ interface X {
   void y2(optional (boolean or Dict) union = {});
   void z(optional Union union);
   void z2(optional Union union = {});
+  void r(Required req);
 };

From bb58ca584aff1aa724d91a136489c327a7be7452 Mon Sep 17 00:00:00 2001
From: Kagami Sascha Rosylight
Date: Sat, 13 Jul 2019 13:06:10 +0900
Subject: [PATCH 2/3] move argument validation into argument

---
 README.md                                        |  1 +
 lib/productions/argument.js                      | 14 ++++++++++++++
 lib/productions/operation.js                     |  9 +--------
 test/invalid/baseline/operation-dict-default.txt |  6 +++---
 test/syntax/baseline/allowany.json               |  2 ++
 test/syntax/baseline/argument-extattrs.json      |  1 +
 test/syntax/baseline/callback.json               |  4 ++++
 test/syntax/baseline/constructor.json            |  1 +
 test/syntax/baseline/enum.json                   |  2 ++
 test/syntax/baseline/equivalent-decl.json        |  9 +++++++++
 test/syntax/baseline/extended-attributes.json    |  1 +
 test/syntax/baseline/getter-setter.json          |  3 +++
 .../baseline/identifier-qualified-names.json     |  3 +++
 test/syntax/baseline/indexed-properties.json     |  8 ++++++++
 test/syntax/baseline/namedconstructor.json       |  1 +
 test/syntax/baseline/namespace.json              |  4 ++++
 test/syntax/baseline/nointerfaceobject.json      |  1 +
 test/syntax/baseline/nullableobjects.json        |  2 ++
 test/syntax/baseline/operation-optional-arg.json |  4 ++++
 test/syntax/baseline/overloading.json            | 10 ++++++++++
 test/syntax/baseline/overridebuiltins.json       |  1 +
 test/syntax/baseline/record.json                 |  2 ++
 test/syntax/baseline/reg-operations.json         |  3 +++
 test/syntax/baseline/sequence.json               |  2 ++
 test/syntax/baseline/static.json                 |  3 +++
 test/syntax/baseline/treatasnull.json            |  1 +
 test/syntax/baseline/treatasundefined.json       |  1 +
 test/syntax/baseline/typedef.json                |  2 ++
 test/syntax/baseline/typesuffixes.json           |  1 +
 test/syntax/baseline/variadic-operations.json    |  2 ++
 30 files changed, 93 insertions(+), 11 deletions(-)

diff --git a/README.md b/README.md
index eacd761e..df62feca 100644
--- a/README.md
+++ b/README.md
@@ -576,6 +576,7 @@ The arguments (e.g. for an operation) look like this:
 ```JS
 {
   "arguments": [{
+    "type": "argument",
     "optional": false,
     "variadic": true
     "extAttrs": []
diff --git a/lib/productions/argument.js b/lib/productions/argument.js
index 8afe7b57..5292bb2b 100644
--- a/lib/productions/argument.js
+++ b/lib/productions/argument.js
@@ -3,6 +3,8 @@ import { Default } from "./default.js";
 import { ExtendedAttributes } from "./extended-attributes.js";
 import { unescape, type_with_extended_attributes } from "./helpers.js";
 import { argumentNameKeywords } from "../tokeniser.js";
+import { validationError } from "../error.js";
+import { idlTypeIncludesDictionary } from "../validators/helpers.js";
 
 export class Argument extends Base {
   /**
@@ -29,6 +31,9 @@ export class Argument extends Base {
     return ret;
   }
 
+  get type() {
+    return "argument";
+  }
   get optional() {
     return !!this.tokens.optional;
   }
@@ -38,4 +43,13 @@ export class Argument extends Base {
   get name() {
     return unescape(this.tokens.name.value);
   }
+
+  *validate(defs) {
+    if (idlTypeIncludesDictionary(this.idlType, defs)) {
+      if (this.optional && !this.default) {
+        const message = `Optional dictionary arguments must have a default value of \`{}\`.`;
+        yield validationError(this.source, this.tokens.name, this, message);
+      }
+    }
+  }
 }
diff --git a/lib/productions/operation.js b/lib/productions/operation.js
index 42e18d34..40478756 100644
--- a/lib/productions/operation.js
+++ b/lib/productions/operation.js
@@ -1,7 +1,5 @@
 import { Base } from "./base.js";
 import { return_type, argument_list, unescape } from "./helpers.js";
-import { validationError } from "../error.js";
-import { idlTypeIncludesDictionary } from "../validators/helpers.js";
 
 export class Operation extends Base {
   /**
@@ -48,12 +46,7 @@
 
   *validate(defs) {
     for (const argument of this.arguments) {
-      if (idlTypeIncludesDictionary(argument.idlType, defs)) {
-        if (argument.optional && !argument.default) {
-          const message = `Optional dictionary arguments must have a default value of \`{}\`.`;
-          yield validationError(this.source, argument.tokens.name, this, message);
-        }
-      }
+      yield* argument.validate(defs);
     }
   }
 }
diff --git a/test/invalid/baseline/operation-dict-default.txt b/test/invalid/baseline/operation-dict-default.txt
index 6eb5bc8f..a1b8f457 100644
--- a/test/invalid/baseline/operation-dict-default.txt
+++ b/test/invalid/baseline/operation-dict-default.txt
@@ -1,9 +1,9 @@
-Validation error at line 13 in operation-dict-default.webidl, inside `operation x`:
+Validation error at line 13 in operation-dict-default.webidl, inside `argument dict`:
   void x(optional Dict dict);
                        ^
 Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 15 in operation-dict-default.webidl, inside `operation y`:
+Validation error at line 15 in operation-dict-default.webidl, inside `argument union`:
   (boolean or Dict) union);
                     ^
 Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 17 in operation-dict-default.webidl, inside `operation z`:
+Validation error at line 17 in operation-dict-default.webidl, inside `argument union`:
   void z(optional Union union);
                         ^
 Optional dictionary arguments must have a default value of `{}`.
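With this refactoring, an argument is a self-describing AST node: it reports `type: "argument"` and carries its own `*validate()` step, which is why the error context strings above now read `argument dict` instead of `operation x`. A short sketch through the public API (illustrative IDL, not from the fixtures):

```JS
// Sketch: arguments now expose their own type tag and validation.
const { parse, validate } = require("webidl2");

const ast = parse(`
  dictionary Dict {};

  [Exposed=Window]
  interface X {
    void x(optional Dict dict);
  };
`);

const [argument] = ast[1].members[0].arguments;
console.log(argument.type); // "argument" (the new getter, also serialized to JSON)

const [error] = validate(ast);
console.log(error.context); // now says inside `argument dict`, not `operation x`
```

The remaining hunks below are the mechanical consequence: every serialized argument in the syntax baselines gains the new `type` field.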
diff --git a/test/syntax/baseline/allowany.json b/test/syntax/baseline/allowany.json
index 54ec3a16..24fb1df7 100644
--- a/test/syntax/baseline/allowany.json
+++ b/test/syntax/baseline/allowany.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "b",
                         "extAttrs": [],
                         "idlType": {
@@ -63,6 +64,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "s",
                         "extAttrs": [
                             {
diff --git a/test/syntax/baseline/argument-extattrs.json b/test/syntax/baseline/argument-extattrs.json
index 555db58b..44655164 100644
--- a/test/syntax/baseline/argument-extattrs.json
+++ b/test/syntax/baseline/argument-extattrs.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "argname",
                         "extAttrs": [
                             {
diff --git a/test/syntax/baseline/callback.json b/test/syntax/baseline/callback.json
index 6eed8613..44fdfcf9 100644
--- a/test/syntax/baseline/callback.json
+++ b/test/syntax/baseline/callback.json
@@ -12,6 +12,7 @@
         },
         "arguments": [
             {
+                "type": "argument",
                 "name": "status",
                 "extAttrs": [],
                 "idlType": {
@@ -47,6 +48,7 @@
         },
         "arguments": [
             {
+                "type": "argument",
                 "name": "details",
                 "extAttrs": [],
                 "idlType": {
@@ -82,6 +84,7 @@
         },
         "arguments": [
             {
+                "type": "argument",
                 "name": "a",
                 "extAttrs": [],
                 "idlType": {
@@ -97,6 +100,7 @@
                 "variadic": false
             },
             {
+                "type": "argument",
                 "name": "b",
                 "extAttrs": [],
                 "idlType": {
diff --git a/test/syntax/baseline/constructor.json b/test/syntax/baseline/constructor.json
index e90eb899..b96a7d86 100644
--- a/test/syntax/baseline/constructor.json
+++ b/test/syntax/baseline/constructor.json
@@ -78,6 +78,7 @@
                 "rhs": null,
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "radius",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/enum.json b/test/syntax/baseline/enum.json
index 18360cb2..ea2c37c5 100644
--- a/test/syntax/baseline/enum.json
+++ b/test/syntax/baseline/enum.json
@@ -66,6 +66,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "type",
                         "extAttrs": [],
                         "idlType": {
@@ -81,6 +82,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "size",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/equivalent-decl.json b/test/syntax/baseline/equivalent-decl.json
index b57c3615..9611a117 100644
--- a/test/syntax/baseline/equivalent-decl.json
+++ b/test/syntax/baseline/equivalent-decl.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -63,6 +64,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -78,6 +80,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "propertyValue",
                         "extAttrs": [],
                         "idlType": {
@@ -133,6 +136,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -164,6 +168,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -179,6 +184,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "propertyValue",
                         "extAttrs": [],
                         "idlType": {
@@ -210,6 +216,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -241,6 +248,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -256,6 +264,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "propertyValue",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/extended-attributes.json b/test/syntax/baseline/extended-attributes.json
index b3081ee5..4fb35fdb 100644
--- a/test/syntax/baseline/extended-attributes.json
+++ b/test/syntax/baseline/extended-attributes.json
@@ -148,6 +148,7 @@
                 "rhs": null,
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "radius",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/getter-setter.json b/test/syntax/baseline/getter-setter.json
index b5c1ef42..32b14f32 100644
--- a/test/syntax/baseline/getter-setter.json
+++ b/test/syntax/baseline/getter-setter.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -63,6 +64,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "propertyName",
                         "extAttrs": [],
                         "idlType": {
@@ -78,6 +80,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "propertyValue",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/identifier-qualified-names.json b/test/syntax/baseline/identifier-qualified-names.json
index 021d5000..fd8d6971 100644
--- a/test/syntax/baseline/identifier-qualified-names.json
+++ b/test/syntax/baseline/identifier-qualified-names.json
@@ -30,6 +30,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "interface",
                         "extAttrs": [],
                         "idlType": {
@@ -61,6 +62,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "keyName",
                         "extAttrs": [],
                         "idlType": {
@@ -140,6 +142,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "callback",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/indexed-properties.json b/test/syntax/baseline/indexed-properties.json
index fd1d1a23..6ac5b738 100644
--- a/test/syntax/baseline/indexed-properties.json
+++ b/test/syntax/baseline/indexed-properties.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "index",
                         "extAttrs": [],
                         "idlType": {
@@ -63,6 +64,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "index",
                         "extAttrs": [],
                         "idlType": {
@@ -78,6 +80,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "value",
                         "extAttrs": [],
                         "idlType": {
@@ -109,6 +112,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "index",
                         "extAttrs": [],
                         "idlType": {
@@ -140,6 +144,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "name",
                         "extAttrs": [],
                         "idlType": {
@@ -171,6 +176,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "name",
                         "extAttrs": [],
                         "idlType": {
@@ -186,6 +192,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "value",
                         "extAttrs": [],
                         "idlType": {
@@ -217,6 +224,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "name",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/namedconstructor.json b/test/syntax/baseline/namedconstructor.json
index 94e192df..7e0ab150 100644
--- a/test/syntax/baseline/namedconstructor.json
+++ b/test/syntax/baseline/namedconstructor.json
@@ -23,6 +23,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "src",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/namespace.json b/test/syntax/baseline/namespace.json
index f02282be..bf52e63c 100644
--- a/test/syntax/baseline/namespace.json
+++ b/test/syntax/baseline/namespace.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
@@ -47,6 +48,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "y",
                         "extAttrs": [],
                         "idlType": {
@@ -78,6 +80,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
@@ -93,6 +96,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "y",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/nointerfaceobject.json b/test/syntax/baseline/nointerfaceobject.json
index 25b482ef..5ae3d2fa 100644
--- a/test/syntax/baseline/nointerfaceobject.json
+++ b/test/syntax/baseline/nointerfaceobject.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "key",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/nullableobjects.json b/test/syntax/baseline/nullableobjects.json
index 74cf78a6..69c80928 100644
--- a/test/syntax/baseline/nullableobjects.json
+++ b/test/syntax/baseline/nullableobjects.json
@@ -33,6 +33,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
@@ -64,6 +65,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/operation-optional-arg.json b/test/syntax/baseline/operation-optional-arg.json
index 187c5793..2744a146 100644
--- a/test/syntax/baseline/operation-optional-arg.json
+++ b/test/syntax/baseline/operation-optional-arg.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "v1",
                         "extAttrs": [],
                         "idlType": {
@@ -32,6 +33,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "v2",
                         "extAttrs": [],
                         "idlType": {
@@ -47,6 +49,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "v3",
                         "extAttrs": [],
                         "idlType": {
@@ -62,6 +65,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "alpha",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/overloading.json b/test/syntax/baseline/overloading.json
index 224feffd..0650762e 100644
--- a/test/syntax/baseline/overloading.json
+++ b/test/syntax/baseline/overloading.json
@@ -33,6 +33,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
@@ -64,6 +65,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "x",
                         "extAttrs": [],
                         "idlType": {
@@ -104,6 +106,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "a",
                         "extAttrs": [],
                         "idlType": {
@@ -135,6 +138,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "a",
                         "extAttrs": [
                             {
@@ -157,6 +161,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "b",
                         "extAttrs": [],
                         "idlType": {
@@ -172,6 +177,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "c",
                         "extAttrs": [],
                         "idlType": {
@@ -218,6 +224,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "a",
                         "extAttrs": [],
                         "idlType": {
@@ -233,6 +240,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "b",
                         "extAttrs": [],
                         "idlType": {
@@ -248,6 +256,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "c",
                         "extAttrs": [],
                         "idlType": {
@@ -263,6 +272,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "d",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/overridebuiltins.json b/test/syntax/baseline/overridebuiltins.json
index c742e343..5c793bfa 100644
--- a/test/syntax/baseline/overridebuiltins.json
+++ b/test/syntax/baseline/overridebuiltins.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "key",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/record.json b/test/syntax/baseline/record.json
index 53d7509e..ec6d3de8 100644
--- a/test/syntax/baseline/record.json
+++ b/test/syntax/baseline/record.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "param",
                         "extAttrs": [],
                         "idlType": {
@@ -118,6 +119,7 @@
                 "rhs": null,
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "init",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/reg-operations.json b/test/syntax/baseline/reg-operations.json
index a05387c0..af8b6b5f 100644
--- a/test/syntax/baseline/reg-operations.json
+++ b/test/syntax/baseline/reg-operations.json
@@ -71,6 +71,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "size",
                         "extAttrs": [],
                         "idlType": {
@@ -102,6 +103,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "width",
                         "extAttrs": [],
                         "idlType": {
@@ -117,6 +119,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "height",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/sequence.json b/test/syntax/baseline/sequence.json
index a0f8a1bf..b680df20 100644
--- a/test/syntax/baseline/sequence.json
+++ b/test/syntax/baseline/sequence.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "coordinates",
                         "extAttrs": [],
                         "idlType": {
@@ -90,6 +91,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "arg",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/static.json b/test/syntax/baseline/static.json
index 6057c454..a65dada5 100644
--- a/test/syntax/baseline/static.json
+++ b/test/syntax/baseline/static.json
@@ -85,6 +85,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "c1",
                         "extAttrs": [],
                         "idlType": {
@@ -100,6 +101,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "c2",
                         "extAttrs": [],
                         "idlType": {
@@ -115,6 +117,7 @@
                         "variadic": false
                     },
                     {
+                        "type": "argument",
                         "name": "c3",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/treatasnull.json b/test/syntax/baseline/treatasnull.json
index bcd2bf44..85e90b0c 100644
--- a/test/syntax/baseline/treatasnull.json
+++ b/test/syntax/baseline/treatasnull.json
@@ -47,6 +47,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "breedName",
                         "extAttrs": [
                             {
diff --git a/test/syntax/baseline/treatasundefined.json b/test/syntax/baseline/treatasundefined.json
index eb26932b..62069cb9 100644
--- a/test/syntax/baseline/treatasundefined.json
+++ b/test/syntax/baseline/treatasundefined.json
@@ -47,6 +47,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "breedName",
                         "extAttrs": [
                             {
diff --git a/test/syntax/baseline/typedef.json b/test/syntax/baseline/typedef.json
index 0bd91d28..b07da0e3 100644
--- a/test/syntax/baseline/typedef.json
+++ b/test/syntax/baseline/typedef.json
@@ -132,6 +132,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "p",
                         "extAttrs": [],
                         "idlType": {
@@ -163,6 +164,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "ps",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/typesuffixes.json b/test/syntax/baseline/typesuffixes.json
index 30cb76a2..136486cc 100644
--- a/test/syntax/baseline/typesuffixes.json
+++ b/test/syntax/baseline/typesuffixes.json
@@ -17,6 +17,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "foo",
                         "extAttrs": [],
                         "idlType": {
diff --git a/test/syntax/baseline/variadic-operations.json b/test/syntax/baseline/variadic-operations.json
index 8f5a3dc4..991ac502 100644
--- a/test/syntax/baseline/variadic-operations.json
+++ b/test/syntax/baseline/variadic-operations.json
@@ -32,6 +32,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "ints",
                         "extAttrs": [],
                         "idlType": {
@@ -63,6 +64,7 @@
                 },
                 "arguments": [
                     {
+                        "type": "argument",
                         "name": "ints",
                         "extAttrs": [],
                         "idlType": {

From ff9add7b163c689ef29f60b5ee0253afe7a4498a Mon Sep 17 00:00:00 2001
From: Kagami Sascha Rosylight
Date: Sat, 13 Jul 2019 13:10:18 +0900
Subject: [PATCH 3/3] rename relevant test

---
 dist/webidl2.js                                             | 2 +-
 dist/webidl2.js.map                                         | 2 +-
 ...operation-dict-default.txt => argument-dict-default.txt} | 6 +++---
 ...ion-dict-default.webidl => argument-dict-default.webidl} | 0
 4 files changed, 5 insertions(+), 5 deletions(-)
 rename test/invalid/baseline/{operation-dict-default.txt => argument-dict-default.txt} (57%)
rename test/invalid/idl/{operation-dict-default.webidl => argument-dict-default.webidl} (100%) diff --git a/dist/webidl2.js b/dist/webidl2.js index bd0ef4b5..23c6a685 100644 --- a/dist/webidl2.js +++ b/dist/webidl2.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.WebIDL2=t():e.WebIDL2=t()}(this,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var s=t[r]={i:r,l:!1,exports:{}};return e[r].call(s.exports,s,s.exports,n),s.l=!0,s.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var s in e)n.d(r,s,function(t){return e[t]}.bind(null,s));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=0)}([function(e,t,n){"use strict";function r(e,t,n,r,s){function o(n){return n>0?e.slice(t,t+n):e.slice(Math.max(t+n,0),t)}function a(n,{precedes:r}={}){const s=n.map(e=>e.trivia+e.value).join(""),o=e[t];return"eof"===o.type?s:r?s+o.trivia:s.slice(o.trivia.length)}const i="eof"!==e[t].type?e[t].line:e.length>1?e[t-1].line:1,c=function(e){const t=e.split("\n");return t[t.length-1]}(a(o(-5),{precedes:!0})),u=o(5),l=a(u),p=c+l.split("\n")[0]+"\n"+(" ".repeat(c.length)+"^"),d="Syntax"===s?"since":"inside",m=`${s} error at line ${i}${e.name?` in ${e.name}`:""}${n?`, ${d} \`${n.partial?"partial ":""}${n.type} ${n.name}\``:""}:\n${p}`;return{message:`${m} ${r}`,bareMessage:r,context:m,line:i,sourceName:e.name,input:l,tokens:u}}function s(e,t,n,s){return r(e,t.index,n,s,"Validation")}n.r(t);const o={decimal:/-?(?=[0-9]*\.|[0-9]+[eE])(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,integer:/-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,identifier:/[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,string:/"[^"]*"/y,whitespace:/[\t\n\r ]+/y,comment:/((\/(\/.*|\*([^*]|\*[^\/])*\*\/)[\t\n\r ]*)+)/y,other:/[^\t\n\r 0-9A-Za-z]/y},a=["ByteString","DOMString","USVString"],i=["attribute","callback","const","deleter","dictionary","enum","getter","includes","inherit","interface","iterable","maplike","namespace","partial","required","setlike","setter","static","stringifier","typedef","unrestricted"],c=["-Infinity","FrozenArray","Infinity","NaN","Promise","boolean","byte","double","false","float","implements","legacyiterable","long","mixin","null","octet","optional","or","readonly","record","sequence","short","true","unsigned","void"].concat(i,a),u=["(",")",",","...",":",";","<","=",">","?","[","]","{","}"];class l{constructor(e){this.source=function(e){const t=[];let n=0,r="",s=1,a=0;for(;nthis.position&&this.source[this.position].type===e}consume(...e){for(const t of e){if(!this.probe(t))continue;const e=this.source[this.position];return this.position++,e}}unconsume(e){this.position=e}}class p extends 
Error{constructor({message:e,bareMessage:t,context:n,line:r,sourceName:s,input:o,tokens:a}){super(e),this.name="WebIDLParseError",this.bareMessage=t,this.context=n,this.line=r,this.sourceName=s,this.input=o,this.tokens=a}}class d{constructor({source:e,tokens:t}){Object.defineProperties(this,{source:{value:e},tokens:{value:t}})}toJSON(){const e={type:void 0,name:void 0,inheritance:void 0};let t=this;for(;t!==Object.prototype;){const n=Object.getOwnPropertyDescriptors(t);for(const[t,r]of Object.entries(n))(r.enumerable||r.get)&&(e[t]=this[t]);t=Object.getPrototypeOf(t)}return e}}function m(e,t){const n=e.consume("?");n&&(t.tokens.nullable=n),e.probe("?")&&e.error("Can't nullable more than once")}function f(e,t){let n=function(e,t){const n=e.consume("FrozenArray","Promise","sequence","record");if(!n)return;const r=new y({source:e.source,tokens:{base:n}});switch(r.tokens.open=e.consume("<")||e.error(`No opening bracket after ${n.type}`),n.type){case"Promise":{e.probe("[")&&e.error("Promise type cannot have extended attribute");const n=q(e,t)||e.error("Missing Promise subtype");r.subtype.push(n);break}case"sequence":case"FrozenArray":{const s=E(e,t)||e.error(`Missing ${n.type} subtype`);r.subtype.push(s);break}case"record":{e.probe("[")&&e.error("Record key cannot have extended attribute");const n=e.consume(...a)||e.error(`Record key must be one of: ${a.join(", ")}`),s=new y({source:e.source,tokens:{base:n}});s.tokens.separator=e.consume(",")||e.error("Missing comma after record key type"),s.type=t;const o=E(e,t)||e.error("Error parsing generic type record");r.subtype.push(s,o);break}}return r.idlType||e.error(`Error parsing generic type ${n.type}`),r.tokens.close=e.consume(">")||e.error(`Missing closing bracket after ${n.type}`),r}(e,t)||O(e);if(!n){const t=e.consume("identifier",...a);if(!t)return;n=new y({source:e.source,tokens:{base:t}}),e.probe("<")&&e.error(`Unsupported generic type ${t.value}`)}return"Promise"===n.generic&&e.probe("?")&&e.error("Promise type cannot be nullable"),n.type=t||null,m(e,n),n.nullable&&"any"===n.idlType&&e.error("Type `any` cannot be made nullable"),n}class y extends d{static parse(e,t){return f(e,t)||function(e,t){const n={};if(n.open=e.consume("("),!n.open)return;const r=new y({source:e.source,tokens:n});for(r.type=t||null;;){const t=E(e)||e.error("No type after open parenthesis or 'or' in union type");"any"===t.idlType&&e.error("Type `any` cannot be included in a union type"),r.subtype.push(t);const n=e.consume("or");if(!n)break;t.tokens.separator=n}return r.idlType.length<2&&e.error("At least two types are expected in a union type but found less"),n.close=e.consume(")")||e.error("Unterminated union type"),m(e,r),r}(e,t)}constructor({source:e,tokens:t}){super({source:e,tokens:t}),Object.defineProperty(this,"subtype",{value:[]}),this.extAttrs=[]}get generic(){return this.subtype.length&&this.tokens.base?this.tokens.base.value:""}get nullable(){return Boolean(this.tokens.nullable)}get union(){return Boolean(this.subtype.length)&&!this.tokens.base}get idlType(){if(this.subtype.length)return this.subtype;return M([this.tokens.prefix,this.tokens.base,this.tokens.postfix].filter(e=>e).map(e=>e.value).join(" "))}}class k extends d{static parse(e){const t=e.consume("=");if(!t)return null;const n=j(e)||e.consume("string","null","[","{")||e.error("No value for default"),r=[n];if("["===n.type){const t=e.consume("]")||e.error("Default sequence value must be empty");r.push(t)}else if("{"===n.type){const t=e.consume("}")||e.error("Default dictionary value must be 
empty");r.push(t)}return new k({source:e.source,tokens:{assign:t},expression:r})}constructor({source:e,tokens:t,expression:n}){super({source:e,tokens:t}),Object.defineProperty(this,"expression",{value:n})}get type(){return I(this.expression[0]).type}get value(){return I(this.expression[0]).value}get negative(){return I(this.expression[0]).negative}}class b extends Array{constructor({source:e,tokens:t}){super(),Object.defineProperties(this,{source:{value:e},tokens:{value:t}})}}class h extends d{static parse(e){const t={assign:e.consume("=")},n=new h({source:e.source,tokens:t});return t.assign&&(t.secondaryName=e.consume("identifier","decimal","integer","string")),t.open=e.consume("("),t.open?(n.list="identifier-list"===n.rhsType?function(e){const t=N(e,{parser:w.parser(e,"identifier"),listName:"identifier list"});t.length||e.error("Expected identifiers but none found");return t}(e):P(e),t.close=e.consume(")")||e.error("Unexpected token in extended attribute argument list")):n.hasRhs&&!t.secondaryName&&e.error("No right hand side to extended attribute assignment"),n}get rhsType(){return this.tokens.assign?this.tokens.secondaryName?this.tokens.secondaryName.type:"identifier-list":null}}class g extends d{static parse(e){const t=e.consume("identifier");if(t)return new g({tokens:{name:t},params:h.parse(e)})}constructor({source:e,tokens:t,params:n}){super({source:e,tokens:t}),Object.defineProperty(this,"params",{value:n})}get type(){return"extended-attribute"}get name(){return this.tokens.name.value}get rhs(){const{rhsType:e,tokens:t,list:n}=this.params;return e?{type:e,value:"identifier-list"===e?n:t.secondaryName.value}:null}get arguments(){const{rhsType:e,list:t}=this.params;return t&&"identifier-list"!==e?t:[]}}class x extends b{static parse(e){const t={};if(t.open=e.consume("["),!t.open)return[];const n=new x({source:e.source,tokens:t});return n.push(...N(e,{parser:g.parse,listName:"extended attribute"})),t.close=e.consume("]")||e.error("Unexpected closing token of extended attribute"),n.length||e.error("Found an empty extended attribute"),e.probe("[")&&e.error("Illegal double extended attribute lists, consider merging them"),n}}class v extends d{static parse(e){const t=e.position,n={},r=new v({source:e.source,tokens:n});return r.extAttrs=x.parse(e),n.optional=e.consume("optional"),r.idlType=E(e,"argument-type"),r.idlType?(n.optional||(n.variadic=e.consume("...")),n.name=e.consume("identifier",...i),n.name?(r.default=n.optional?k.parse(e):null,r):e.unconsume(t)):e.unconsume(t)}get optional(){return!!this.tokens.optional}get variadic(){return!!this.tokens.variadic}get name(){return M(this.tokens.name.value)}}class w extends d{static parser(e,t){return()=>{const n=e.consume(t);if(n)return new w({source:e.source,tokens:{value:n}})}}get value(){return this.tokens.value.value}}function T(e,t){if(!e.union){const n=t.unique.get(e.idlType);return!!n&&("typedef"===n.type?T(n.idlType,t):"dictionary"===n.type)}for(const n of e.subtype)if(T(n,t))return!0;return!1}class A extends d{static parse(e,{special:t,regular:n}={}){const r={special:t},s=new A({source:e.source,tokens:r});return t&&"stringifier"===t.value&&(r.termination=e.consume(";"),r.termination)?(s.arguments=[],s):(t||n||(r.special=e.consume("getter","setter","deleter")),s.idlType=q(e)||e.error("Missing return type"),r.name=e.consume("identifier"),r.open=e.consume("(")||e.error("Invalid operation"),s.arguments=P(e),r.close=e.consume(")")||e.error("Unterminated operation"),r.termination=e.consume(";")||e.error("Unterminated operation, expected 
`;`"),s)}get type(){return"operation"}get name(){const{name:e}=this.tokens;return e?M(e.value):""}get special(){return this.tokens.special?this.tokens.special.value:""}*validate(e){for(const t of this.arguments)if(T(t.idlType,e)&&!t.default){const e="Optional dictionary arguments must have a default value of `{}`.";yield s(this.source,t.tokens.name,this,e)}}}class $ extends d{static parse(e,{special:t,noInherit:n=!1,readonly:r=!1}={}){const s=e.position,o={special:t},a=new $({source:e.source,tokens:o});if(t||n||(o.special=e.consume("inherit")),"inherit"===a.special&&e.probe("readonly")&&e.error("Inherited attributes cannot be read-only"),o.readonly=e.consume("readonly"),r&&!o.readonly&&e.probe("attribute")&&e.error("Attributes must be readonly in this context"),o.base=e.consume("attribute"),o.base){switch(a.idlType=E(e,"attribute-type")||e.error("Attribute lacks a type"),a.idlType.generic){case"sequence":case"record":e.error(`Attributes cannot accept ${a.idlType.generic} types`)}return o.name=e.consume("identifier","required")||e.error("Attribute lacks a name"),o.termination=e.consume(";")||e.error("Unterminated attribute, expected `;`"),a}e.unconsume(s)}get type(){return"attribute"}get special(){return this.tokens.special?this.tokens.special.value:""}get readonly(){return!!this.tokens.readonly}get name(){return M(this.tokens.name.value)}}function M(e){return e.startsWith("_")?e.slice(1):e}function N(e,{parser:t,allowDangler:n,listName:r="list"}){const s=t(e);if(!s)return[];s.tokens.separator=e.consume(",");const o=[s];for(;s.tokens.separator;){const s=t(e);if(!s){n||e.error(`Trailing comma in ${r}`);break}if(s.tokens.separator=e.consume(","),o.push(s),!s.tokens.separator)break}return o}function j(e){return e.consume("true","false","Infinity","-Infinity","NaN","decimal","integer")}function I({type:e,value:t}){switch(e){case"true":case"false":return{type:"boolean",value:"true"===e};case"Infinity":case"-Infinity":return{type:"Infinity",negative:e.startsWith("-")};case"[":return{type:"sequence",value:[]};case"{":return{type:"dictionary"};case"decimal":case"integer":return{type:"number",value:t};case"string":return{type:"string",value:t.slice(1,-1)};default:return{type:e}}}function O(e){const{source:t}=e,n=function(){const n=e.consume("unsigned"),r=e.consume("short","long");if(r){const s=e.consume("long");return new y({source:t,tokens:{prefix:n,base:r,postfix:s}})}n&&e.error("Failed to parse integer type")}()||function(){const n=e.consume("unrestricted"),r=e.consume("float","double");if(r)return new y({source:t,tokens:{prefix:n,base:r}});n&&e.error("Failed to parse float type")}();if(n)return n;const r=e.consume("boolean","byte","octet");return r?new y({source:t,tokens:{base:r}}):void 0}function P(e){return N(e,{parser:v.parse,listName:"arguments list"})}function E(e,t){const n=x.parse(e),r=y.parse(e,t);return r&&(r.extAttrs=n),r}function q(e,t){const n=y.parse(e,t||"return-type");if(n)return n;const r=e.consume("void");if(r){const t=new y({source:e.source,tokens:{base:r}});return t.type="return-type",t}}function U(e){const t=e.consume("stringifier");if(t)return $.parse(e,{special:t})||A.parse(e,{special:t})||e.error("Unterminated stringifier")}class S extends w{static parse(e){const t=e.consume("string");if(t)return new S({source:e.source,tokens:{value:t}})}get type(){return"enum-value"}get value(){return super.value.slice(1,-1)}}class D extends d{static parse(e){const t={};if(t.base=e.consume("enum"),!t.base)return;t.name=e.consume("identifier")||e.error("No name for enum");const 
n=e.current=new D({source:e.source,tokens:t});return t.open=e.consume("{")||e.error("Bodyless enum"),n.values=N(e,{parser:S.parse,allowDangler:!0,listName:"enumeration"}),e.probe("string")&&e.error("No comma between enum values"),t.close=e.consume("}")||e.error("Unexpected value in enum"),n.values.length||e.error("No value in enum"),t.termination=e.consume(";")||e.error("No semicolon after enum"),n}get type(){return"enum"}get name(){return M(this.tokens.name.value)}}class W extends d{static parse(e){const t=e.consume("identifier");if(!t)return;const n={target:t};if(n.includes=e.consume("includes"),n.includes)return n.mixin=e.consume("identifier")||e.error("Incomplete includes statement"),n.termination=e.consume(";")||e.error("No terminating ; for includes statement"),new W({source:e.source,tokens:n});e.unconsume(t.index)}get type(){return"includes"}get target(){return M(this.tokens.target.value)}get includes(){return M(this.tokens.mixin.value)}}class _ extends d{static parse(e){const t={},n=new _({source:e.source,tokens:t});if(t.base=e.consume("typedef"),t.base)return n.idlType=E(e,"typedef-type")||e.error("Typedef lacks a type"),t.name=e.consume("identifier")||e.error("Typedef lacks a name"),e.current=n,t.termination=e.consume(";")||e.error("Unterminated typedef, expected `;`"),n}get type(){return"typedef"}get name(){return M(this.tokens.name.value)}}class F extends d{static parse(e,t){const n={base:t},r=new F({source:e.source,tokens:n});return n.name=e.consume("identifier")||e.error("Callback lacks a name"),e.current=r,n.assign=e.consume("=")||e.error("Callback lacks an assignment"),r.idlType=q(e)||e.error("Callback lacks a return type"),n.open=e.consume("(")||e.error("Callback lacks parentheses for arguments"),r.arguments=P(e),n.close=e.consume(")")||e.error("Unterminated callback"),n.termination=e.consume(";")||e.error("Unterminated callback, expected `;`"),r}get type(){return"callback"}get name(){return M(this.tokens.name.value)}}class C extends d{static parse(e,t,{type:n,inheritable:r,allowedMembers:s}){const{tokens:o}=t;for(o.name=e.consume("identifier")||e.error(`Missing name in ${t.type}`),e.current=t,r&&Object.assign(o,function(e){const t=e.consume(":");return t?{colon:t,inheritance:e.consume("identifier")||e.error("Inheritance lacks a type")}:{}}(e)),o.open=e.consume("{")||e.error(`Bodyless ${n}`),t.members=[];;){if(o.close=e.consume("}"),o.close)return o.termination=e.consume(";")||e.error(`Missing semicolon after ${n}`),t;const r=x.parse(e);let a;for(const[t,...n]of s)if(a=t(e,...n))break;a||e.error("Unknown member"),a.extAttrs=r,t.members.push(a)}}get partial(){return!!this.tokens.partial}get name(){return M(this.tokens.name.value)}get inheritance(){return this.tokens.inheritance?M(this.tokens.inheritance.value):null}*validate(e){for(const t of this.members)t.validate&&(yield*t.validate(e))}}class R extends d{static parse(e){const t={};if(t.base=e.consume("const"),!t.base)return;let n=O(e);if(!n){const t=e.consume("identifier")||e.error("Const lacks a type");n=new y({source:e.source,tokens:{base:t}})}e.probe("?")&&e.error("Unexpected nullable constant type"),n.type="const-type",t.name=e.consume("identifier")||e.error("Const lacks a name"),t.assign=e.consume("=")||e.error("Const lacks value assignment"),t.value=j(e)||e.error("Const lacks a value"),t.termination=e.consume(";")||e.error("Unterminated const, expected `;`");const r=new R({source:e.source,tokens:t});return r.idlType=n,r}get type(){return"const"}get name(){return unescape(this.tokens.name.value)}get value(){return 
I(this.tokens.value)}}class z extends d{static parse(e){const t=e.position,n={},r=new z({source:e.source,tokens:n});if(n.readonly=e.consume("readonly"),n.base=n.readonly?e.consume("maplike","setlike"):e.consume("iterable","maplike","setlike"),!n.base)return void e.unconsume(t);const{type:s}=r,o="maplike"===s,a=o||"iterable"===s;n.open=e.consume("<")||e.error(`Missing less-than sign \`<\` in ${s} declaration`);const i=E(e)||e.error(`Missing a type argument in ${s} declaration`);return r.idlType=[i],a&&(i.tokens.separator=e.consume(","),i.tokens.separator?r.idlType.push(E(e)):o&&e.error(`Missing second type argument in ${s} declaration`)),n.close=e.consume(">")||e.error(`Missing greater-than sign \`>\` in ${s} declaration`),n.termination=e.consume(";")||e.error(`Missing semicolon after ${s} declaration`),r}get type(){return this.tokens.base.value}get readonly(){return!!this.tokens.readonly}}function B(e){const t=e.consume("static");if(t)return $.parse(e,{special:t})||A.parse(e,{special:t})||e.error("No body in static member")}class L extends C{static parse(e,t,{partial:n=null}={}){const r={partial:n,base:t};return C.parse(e,new L({source:e.source,tokens:r}),{type:"interface",inheritable:!n,allowedMembers:[[R.parse],[B],[U],[z.parse],[$.parse],[A.parse]]})}get type(){return"interface"}*validate(e){if(!this.partial&&this.extAttrs.every(e=>"Exposed"!==e.name)){const e="Interfaces must have `[Exposed]` extended attribute. To fix, add, for example, `[Exposed=Window]`. Please also consider carefully if your interface should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield s(this.source,this.tokens.name,this,e)}yield*super.validate(e),this.partial||(yield*function*(e,t){const n=new Set(i(t).map(e=>e.name)),r=e.partials.get(t.name)||[],o=e.mixinMap.get(t.name)||[];for(const e of[...r,...o]){const r=i(e);yield*a(r,n,e,t);for(const e of r)n.add(e.name)}function*a(e,t,n,r){for(const o of e){const{name:e}=o;if(e&&t.has(e)){const t=`The operation "${e}" has already been defined for the base interface "${r.name}" either in itself or in a mixin`;yield s(n.source,o.tokens.name,n,t)}}}function i(e){return e.members.filter(({type:e})=>"operation"===e)}}(e,this))}}class Z extends C{static parse(e,t,{partial:n}={}){const r={partial:n,base:t};if(r.mixin=e.consume("mixin"),r.mixin)return C.parse(e,new Z({source:e.source,tokens:r}),{type:"interface mixin",allowedMembers:[[R.parse],[U],[$.parse,{noInherit:!0}],[A.parse,{regular:!0}]]})}get type(){return"interface mixin"}}class V extends d{static parse(e){const t={},n=new V({source:e.source,tokens:t});return n.extAttrs=x.parse(e),t.required=e.consume("required"),n.idlType=E(e,"dictionary-type")||e.error("Dictionary member lacks a type"),t.name=e.consume("identifier")||e.error("Dictionary member lacks a name"),n.default=k.parse(e),t.required&&n.default&&e.error("Required member must not have a default"),t.termination=e.consume(";")||e.error("Unterminated dictionary member, expected `;`"),n}get type(){return"field"}get name(){return M(this.tokens.name.value)}get required(){return!!this.tokens.required}}class J extends C{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("dictionary"),n.base)return C.parse(e,new J({source:e.source,tokens:n}),{type:"dictionary",inheritable:!t,allowedMembers:[[V.parse]]})}get type(){return"dictionary"}}class X extends C{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("namespace"),n.base)return 
C.parse(e,new X({source:e.source,tokens:n}),{type:"namespace",allowedMembers:[[$.parse,{noInherit:!0,readonly:!0}],[A.parse,{regular:!0}]]})}get type(){return"namespace"}*validate(e){if(!this.partial&&this.extAttrs.every(e=>"Exposed"!==e.name)){const e="Namespaces must have [Exposed] extended attribute. To fix, add, for example, [Exposed=Window]. Please also consider carefully if your namespace should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield s(this.source,this.tokens.name,this,e)}yield*super.validate(e)}}class G extends C{static parse(e,t,{partial:n=null}={}){const r={callback:t};if(r.base=e.consume("interface"),r.base)return C.parse(e,new G({source:e.source,tokens:r}),{type:"callback interface",inheritable:!n,allowedMembers:[[R.parse],[A.parse,{regular:!0}]]})}get type(){return"callback interface"}}function H(e,t){const n=e.source;function r(t){e.error(t)}function s(...t){return e.consume(...t)}function o(t){const n=s("interface");if(n)return Z.parse(e,n,t)||L.parse(e,n,t)||r("Interface has no proper body")}function a(){return function(){const t=s("callback");if(t)return e.probe("interface")?G.parse(e,t):F.parse(e,t)}()||o()||function(){const t=s("partial");if(t)return J.parse(e,{partial:t})||o({partial:t})||X.parse(e,{partial:t})||r("Partial doesn't apply to anything")}()||J.parse(e)||D.parse(e)||_.parse(e)||W.parse(e)||X.parse(e)}const i=function(){if(!n.length)return[];const o=[];for(;;){const t=x.parse(e),n=a();if(!n){t.length&&r("Stray extended attributes");break}n.extAttrs=t,o.push(n)}const i=s("eof");return t.concrete&&o.push(i),o}();return e.positione.join(""),trivia:Q,name:Q,reference:Q,type:Q,generic:Q,inheritance:Q,definition:Q,extendedAttribute:Q,extendedAttributeReference:Q};function ee(e,{templates:t=Y}={}){function n(e,{unescaped:n,context:r}){return n||(n=e.startsWith("_")?e.slice(1):e),t.reference(e,n,r)}function r(e,n=Q,...r){if(!e)return"";const s=n(e.value,...r);return t.wrap([t.trivia(e.trivia),s])}function s(e,t){return r(e,n,{context:t})}function o(e,n){return r(e,t.name,n)}function a(e){if(e.union||e.generic)return t.wrap([r(e.tokens.base,t.generic),r(e.tokens.open),...e.subtype.map(i),r(e.tokens.close)]);const s=e.tokens.prefix||e.tokens.base,o=e.tokens.prefix?[e.tokens.prefix.value,t.trivia(e.tokens.base.trivia)]:[],a=n(t.wrap([...o,e.tokens.base.value,r(e.tokens.postfix)]),{unescaped:e.idlType,context:e});return t.wrap([t.trivia(s.trivia),a])}function i(e){return t.wrap([p(e.extAttrs),a(e),r(e.tokens.nullable),r(e.tokens.separator)])}function c(e){return e?t.wrap([r(e.tokens.assign),...e.expression.map(e=>r(e))]):""}function u(e){return t.wrap([p(e.extAttrs),r(e.tokens.optional),t.type(i(e.idlType)),r(e.tokens.variadic),o(e.tokens.name,{data:e}),c(e.default),r(e.tokens.separator)])}function l(e){const{rhsType:n}=e.params;return t.wrap([t.trivia(e.tokens.name.trivia),t.extendedAttribute(t.wrap([t.extendedAttributeReference(e.name),r(e.params.tokens.assign),s(e.params.tokens.secondaryName,e),r(e.params.tokens.open),...e.params.list?e.params.list.map("identifier-list"===n?n=>(function(e,n){return t.wrap([s(e.tokens.value,n),r(e.tokens.separator)])})(n,e):u):[],r(e.params.tokens.close)])),r(e.tokens.separator)])}function p(e){return e.length?t.wrap([r(e.tokens.open),...e.map(l),r(e.tokens.close)]):""}function d(e){return 
t.definition(t.wrap([p(e.extAttrs),r(e.tokens.callback),r(e.tokens.partial),r(e.tokens.base),r(e.tokens.mixin),o(e.tokens.name,{data:e}),(s=e,s.tokens.inheritance?t.wrap([r(s.tokens.colon),t.trivia(s.tokens.inheritance.trivia),t.inheritance(n(s.tokens.inheritance.value,{context:s}))]):""),r(e.tokens.open),y(e.members,e),r(e.tokens.close),r(e.tokens.termination)]),{data:e});var s}function m(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.readonly),r(e.tokens.base,t.generic),r(e.tokens.open),t.wrap(e.idlType.map(i)),r(e.tokens.close),r(e.tokens.termination)]),{data:e,parent:n})}t=Object.assign({},Y,t);const f={interface:d,"interface mixin":d,namespace:d,operation:function(e,n){const s=e.idlType?[t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.open),t.wrap(e.arguments.map(u)),r(e.tokens.close)]:[];return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.special),...s,r(e.tokens.termination)]),{data:e,parent:n})},attribute:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.special),r(e.tokens.readonly),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.termination)]),{data:e,parent:n})},dictionary:d,field:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.required),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),c(e.default),r(e.tokens.termination)]),{data:e,parent:n})},const:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.assign),r(e.tokens.value),r(e.tokens.termination)]),{data:e,parent:n})},typedef:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e}),r(e.tokens.termination)]),{data:e})},includes:function(e){return t.definition(t.wrap([p(e.extAttrs),s(e.tokens.target,e),r(e.tokens.includes),s(e.tokens.mixin,e),r(e.tokens.termination)]),{data:e})},callback:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),o(e.tokens.name,{data:e}),r(e.tokens.assign),t.type(i(e.idlType)),r(e.tokens.open),...e.arguments.map(u),r(e.tokens.close),r(e.tokens.termination)]),{data:e})},enum:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),o(e.tokens.name,{data:e}),r(e.tokens.open),y(e.values,e),r(e.tokens.close),r(e.tokens.termination)]),{data:e})},"enum-value":function(e,n){return t.wrap([t.trivia(e.tokens.value.trivia),t.definition(t.wrap(['"',t.name(e.value,{data:e,parent:n}),'"']),{data:e,parent:n}),r(e.tokens.separator)])},iterable:m,legacyiterable:m,maplike:m,setlike:m,"callback interface":d,eof:function(e){return t.trivia(e.trivia)}};function y(e,n){if(!e)return;const r=e.map(e=>(function(e,t){if(!f[e.type])throw new Error(`Type "${e.type}" is unsupported`);return f[e.type](e,t)})(e,n));return t.wrap(r)}return y(e)}function te(e,t){const n=new Map,r=e.filter(e=>"includes"===e.type);for(const e of r){const r=t.get(e.includes);if(!r)continue;const s=n.get(e.target);s?s.push(r):n.set(e.target,[r])}return n}function*ne(e){const t=function(e){const t=new Map,n=new Set,r=new Map;for(const s of e)if(s.partial){const e=r.get(s.name);e?e.push(s):r.set(s.name,[s])}else s.name&&(t.has(s.name)?n.add(s):t.set(s.name,s));return{all:e,unique:t,partials:r,duplicates:n,mixinMap:te(e,t)}}(e);for(const e of t.all)e.validate&&(yield*e.validate(t));yield*function*({unique:e,duplicates:t}){for(const n of t){const{name:t}=n,r=`The name "${t}" of type "${e.get(t).type}" was already seen`;yield s(n.source,n.tokens.name,n,r)}}(t)}function 
re(e){return[...ne((t=e,t.flat?t.flat():[].concat(...t)))];var t}n.d(t,"parse",function(){return K}),n.d(t,"write",function(){return ee}),n.d(t,"validate",function(){return re})}])}); +!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.WebIDL2=t():e.WebIDL2=t()}(this,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var s=t[r]={i:r,l:!1,exports:{}};return e[r].call(s.exports,s,s.exports,n),s.l=!0,s.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var s in e)n.d(r,s,function(t){return e[t]}.bind(null,s));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=0)}([function(e,t,n){"use strict";function r(e,t,n,r,s){function o(n){return n>0?e.slice(t,t+n):e.slice(Math.max(t+n,0),t)}function a(n,{precedes:r}={}){const s=n.map(e=>e.trivia+e.value).join(""),o=e[t];return"eof"===o.type?s:r?s+o.trivia:s.slice(o.trivia.length)}const i="eof"!==e[t].type?e[t].line:e.length>1?e[t-1].line:1,c=function(e){const t=e.split("\n");return t[t.length-1]}(a(o(-5),{precedes:!0})),u=o(5),l=a(u),p=c+l.split("\n")[0]+"\n"+(" ".repeat(c.length)+"^"),d="Syntax"===s?"since":"inside",m=`${s} error at line ${i}${e.name?` in ${e.name}`:""}${n?`, ${d} \`${n.partial?"partial ":""}${n.type} ${n.name}\``:""}:\n${p}`;return{message:`${m} ${r}`,bareMessage:r,context:m,line:i,sourceName:e.name,input:l,tokens:u}}function s(e,t,n,s){return r(e,t.index,n,s,"Validation")}n.r(t);const o={decimal:/-?(?=[0-9]*\.|[0-9]+[eE])(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,integer:/-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,identifier:/[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,string:/"[^"]*"/y,whitespace:/[\t\n\r ]+/y,comment:/((\/(\/.*|\*([^*]|\*[^\/])*\*\/)[\t\n\r ]*)+)/y,other:/[^\t\n\r 0-9A-Za-z]/y},a=["ByteString","DOMString","USVString"],i=["attribute","callback","const","deleter","dictionary","enum","getter","includes","inherit","interface","iterable","maplike","namespace","partial","required","setlike","setter","static","stringifier","typedef","unrestricted"],c=["-Infinity","FrozenArray","Infinity","NaN","Promise","boolean","byte","double","false","float","implements","legacyiterable","long","mixin","null","octet","optional","or","readonly","record","sequence","short","true","unsigned","void"].concat(i,a),u=["(",")",",","...",":",";","<","=",">","?","[","]","{","}"];class l{constructor(e){this.source=function(e){const t=[];let n=0,r="",s=1,a=0;for(;nthis.position&&this.source[this.position].type===e}consume(...e){for(const t of e){if(!this.probe(t))continue;const e=this.source[this.position];return this.position++,e}}unconsume(e){this.position=e}}class p extends 
Error{constructor({message:e,bareMessage:t,context:n,line:r,sourceName:s,input:o,tokens:a}){super(e),this.name="WebIDLParseError",this.bareMessage=t,this.context=n,this.line=r,this.sourceName=s,this.input=o,this.tokens=a}}class d{constructor({source:e,tokens:t}){Object.defineProperties(this,{source:{value:e},tokens:{value:t}})}toJSON(){const e={type:void 0,name:void 0,inheritance:void 0};let t=this;for(;t!==Object.prototype;){const n=Object.getOwnPropertyDescriptors(t);for(const[t,r]of Object.entries(n))(r.enumerable||r.get)&&(e[t]=this[t]);t=Object.getPrototypeOf(t)}return e}}function m(e,t){const n=e.consume("?");n&&(t.tokens.nullable=n),e.probe("?")&&e.error("Can't nullable more than once")}function f(e,t){let n=function(e,t){const n=e.consume("FrozenArray","Promise","sequence","record");if(!n)return;const r=new y({source:e.source,tokens:{base:n}});switch(r.tokens.open=e.consume("<")||e.error(`No opening bracket after ${n.type}`),n.type){case"Promise":{e.probe("[")&&e.error("Promise type cannot have extended attribute");const n=E(e,t)||e.error("Missing Promise subtype");r.subtype.push(n);break}case"sequence":case"FrozenArray":{const s=P(e,t)||e.error(`Missing ${n.type} subtype`);r.subtype.push(s);break}case"record":{e.probe("[")&&e.error("Record key cannot have extended attribute");const n=e.consume(...a)||e.error(`Record key must be one of: ${a.join(", ")}`),s=new y({source:e.source,tokens:{base:n}});s.tokens.separator=e.consume(",")||e.error("Missing comma after record key type"),s.type=t;const o=P(e,t)||e.error("Error parsing generic type record");r.subtype.push(s,o);break}}return r.idlType||e.error(`Error parsing generic type ${n.type}`),r.tokens.close=e.consume(">")||e.error(`Missing closing bracket after ${n.type}`),r}(e,t)||I(e);if(!n){const t=e.consume("identifier",...a);if(!t)return;n=new y({source:e.source,tokens:{base:t}}),e.probe("<")&&e.error(`Unsupported generic type ${t.value}`)}return"Promise"===n.generic&&e.probe("?")&&e.error("Promise type cannot be nullable"),n.type=t||null,m(e,n),n.nullable&&"any"===n.idlType&&e.error("Type `any` cannot be made nullable"),n}class y extends d{static parse(e,t){return f(e,t)||function(e,t){const n={};if(n.open=e.consume("("),!n.open)return;const r=new y({source:e.source,tokens:n});for(r.type=t||null;;){const t=P(e)||e.error("No type after open parenthesis or 'or' in union type");"any"===t.idlType&&e.error("Type `any` cannot be included in a union type"),r.subtype.push(t);const n=e.consume("or");if(!n)break;t.tokens.separator=n}return r.idlType.length<2&&e.error("At least two types are expected in a union type but found less"),n.close=e.consume(")")||e.error("Unterminated union type"),m(e,r),r}(e,t)}constructor({source:e,tokens:t}){super({source:e,tokens:t}),Object.defineProperty(this,"subtype",{value:[]}),this.extAttrs=[]}get generic(){return this.subtype.length&&this.tokens.base?this.tokens.base.value:""}get nullable(){return Boolean(this.tokens.nullable)}get union(){return Boolean(this.subtype.length)&&!this.tokens.base}get idlType(){if(this.subtype.length)return this.subtype;return $([this.tokens.prefix,this.tokens.base,this.tokens.postfix].filter(e=>e).map(e=>e.value).join(" "))}}class k extends d{static parse(e){const t=e.consume("=");if(!t)return null;const n=N(e)||e.consume("string","null","[","{")||e.error("No value for default"),r=[n];if("["===n.type){const t=e.consume("]")||e.error("Default sequence value must be empty");r.push(t)}else if("{"===n.type){const t=e.consume("}")||e.error("Default dictionary value must be 
empty");r.push(t)}return new k({source:e.source,tokens:{assign:t},expression:r})}constructor({source:e,tokens:t,expression:n}){super({source:e,tokens:t}),Object.defineProperty(this,"expression",{value:n})}get type(){return j(this.expression[0]).type}get value(){return j(this.expression[0]).value}get negative(){return j(this.expression[0]).negative}}class b extends Array{constructor({source:e,tokens:t}){super(),Object.defineProperties(this,{source:{value:e},tokens:{value:t}})}}class h extends d{static parse(e){const t={assign:e.consume("=")},n=new h({source:e.source,tokens:t});return t.assign&&(t.secondaryName=e.consume("identifier","decimal","integer","string")),t.open=e.consume("("),t.open?(n.list="identifier-list"===n.rhsType?function(e){const t=M(e,{parser:w.parser(e,"identifier"),listName:"identifier list"});t.length||e.error("Expected identifiers but none found");return t}(e):O(e),t.close=e.consume(")")||e.error("Unexpected token in extended attribute argument list")):n.hasRhs&&!t.secondaryName&&e.error("No right hand side to extended attribute assignment"),n}get rhsType(){return this.tokens.assign?this.tokens.secondaryName?this.tokens.secondaryName.type:"identifier-list":null}}class g extends d{static parse(e){const t=e.consume("identifier");if(t)return new g({tokens:{name:t},params:h.parse(e)})}constructor({source:e,tokens:t,params:n}){super({source:e,tokens:t}),Object.defineProperty(this,"params",{value:n})}get type(){return"extended-attribute"}get name(){return this.tokens.name.value}get rhs(){const{rhsType:e,tokens:t,list:n}=this.params;return e?{type:e,value:"identifier-list"===e?n:t.secondaryName.value}:null}get arguments(){const{rhsType:e,list:t}=this.params;return t&&"identifier-list"!==e?t:[]}}class x extends b{static parse(e){const t={};if(t.open=e.consume("["),!t.open)return[];const n=new x({source:e.source,tokens:t});return n.push(...M(e,{parser:g.parse,listName:"extended attribute"})),t.close=e.consume("]")||e.error("Unexpected closing token of extended attribute"),n.length||e.error("Found an empty extended attribute"),e.probe("[")&&e.error("Illegal double extended attribute lists, consider merging them"),n}}class v extends d{static parse(e){const t=e.position,n={},r=new v({source:e.source,tokens:n});return r.extAttrs=x.parse(e),n.optional=e.consume("optional"),r.idlType=P(e,"argument-type"),r.idlType?(n.optional||(n.variadic=e.consume("...")),n.name=e.consume("identifier",...i),n.name?(r.default=n.optional?k.parse(e):null,r):e.unconsume(t)):e.unconsume(t)}get type(){return"argument"}get optional(){return!!this.tokens.optional}get variadic(){return!!this.tokens.variadic}get name(){return $(this.tokens.name.value)}*validate(e){if(function e(t,n){if(!t.union){const r=n.unique.get(t.idlType);return!!r&&("typedef"===r.type?e(r.idlType,n):"dictionary"===r.type)}for(const r of t.subtype)if(e(r,n))return!0;return!1}(this.idlType,e)&&this.optional&&!this.default){const e="Optional dictionary arguments must have a default value of `{}`.";yield s(this.source,this.tokens.name,this,e)}}}class w extends d{static parser(e,t){return()=>{const n=e.consume(t);if(n)return new w({source:e.source,tokens:{value:n}})}}get value(){return this.tokens.value.value}}class T extends d{static parse(e,{special:t,regular:n}={}){const r={special:t},s=new T({source:e.source,tokens:r});return t&&"stringifier"===t.value&&(r.termination=e.consume(";"),r.termination)?(s.arguments=[],s):(t||n||(r.special=e.consume("getter","setter","deleter")),s.idlType=E(e)||e.error("Missing return 
type"),r.name=e.consume("identifier"),r.open=e.consume("(")||e.error("Invalid operation"),s.arguments=O(e),r.close=e.consume(")")||e.error("Unterminated operation"),r.termination=e.consume(";")||e.error("Unterminated operation, expected `;`"),s)}get type(){return"operation"}get name(){const{name:e}=this.tokens;return e?$(e.value):""}get special(){return this.tokens.special?this.tokens.special.value:""}*validate(e){for(const t of this.arguments)yield*t.validate(e)}}class A extends d{static parse(e,{special:t,noInherit:n=!1,readonly:r=!1}={}){const s=e.position,o={special:t},a=new A({source:e.source,tokens:o});if(t||n||(o.special=e.consume("inherit")),"inherit"===a.special&&e.probe("readonly")&&e.error("Inherited attributes cannot be read-only"),o.readonly=e.consume("readonly"),r&&!o.readonly&&e.probe("attribute")&&e.error("Attributes must be readonly in this context"),o.base=e.consume("attribute"),o.base){switch(a.idlType=P(e,"attribute-type")||e.error("Attribute lacks a type"),a.idlType.generic){case"sequence":case"record":e.error(`Attributes cannot accept ${a.idlType.generic} types`)}return o.name=e.consume("identifier","required")||e.error("Attribute lacks a name"),o.termination=e.consume(";")||e.error("Unterminated attribute, expected `;`"),a}e.unconsume(s)}get type(){return"attribute"}get special(){return this.tokens.special?this.tokens.special.value:""}get readonly(){return!!this.tokens.readonly}get name(){return $(this.tokens.name.value)}}function $(e){return e.startsWith("_")?e.slice(1):e}function M(e,{parser:t,allowDangler:n,listName:r="list"}){const s=t(e);if(!s)return[];s.tokens.separator=e.consume(",");const o=[s];for(;s.tokens.separator;){const s=t(e);if(!s){n||e.error(`Trailing comma in ${r}`);break}if(s.tokens.separator=e.consume(","),o.push(s),!s.tokens.separator)break}return o}function N(e){return e.consume("true","false","Infinity","-Infinity","NaN","decimal","integer")}function j({type:e,value:t}){switch(e){case"true":case"false":return{type:"boolean",value:"true"===e};case"Infinity":case"-Infinity":return{type:"Infinity",negative:e.startsWith("-")};case"[":return{type:"sequence",value:[]};case"{":return{type:"dictionary"};case"decimal":case"integer":return{type:"number",value:t};case"string":return{type:"string",value:t.slice(1,-1)};default:return{type:e}}}function I(e){const{source:t}=e,n=function(){const n=e.consume("unsigned"),r=e.consume("short","long");if(r){const s=e.consume("long");return new y({source:t,tokens:{prefix:n,base:r,postfix:s}})}n&&e.error("Failed to parse integer type")}()||function(){const n=e.consume("unrestricted"),r=e.consume("float","double");if(r)return new y({source:t,tokens:{prefix:n,base:r}});n&&e.error("Failed to parse float type")}();if(n)return n;const r=e.consume("boolean","byte","octet");return r?new y({source:t,tokens:{base:r}}):void 0}function O(e){return M(e,{parser:v.parse,listName:"arguments list"})}function P(e,t){const n=x.parse(e),r=y.parse(e,t);return r&&(r.extAttrs=n),r}function E(e,t){const n=y.parse(e,t||"return-type");if(n)return n;const r=e.consume("void");if(r){const t=new y({source:e.source,tokens:{base:r}});return t.type="return-type",t}}function q(e){const t=e.consume("stringifier");if(t)return A.parse(e,{special:t})||T.parse(e,{special:t})||e.error("Unterminated stringifier")}class U extends w{static parse(e){const t=e.consume("string");if(t)return new U({source:e.source,tokens:{value:t}})}get type(){return"enum-value"}get value(){return super.value.slice(1,-1)}}class S extends d{static parse(e){const 
t={};if(t.base=e.consume("enum"),!t.base)return;t.name=e.consume("identifier")||e.error("No name for enum");const n=e.current=new S({source:e.source,tokens:t});return t.open=e.consume("{")||e.error("Bodyless enum"),n.values=M(e,{parser:U.parse,allowDangler:!0,listName:"enumeration"}),e.probe("string")&&e.error("No comma between enum values"),t.close=e.consume("}")||e.error("Unexpected value in enum"),n.values.length||e.error("No value in enum"),t.termination=e.consume(";")||e.error("No semicolon after enum"),n}get type(){return"enum"}get name(){return $(this.tokens.name.value)}}class D extends d{static parse(e){const t=e.consume("identifier");if(!t)return;const n={target:t};if(n.includes=e.consume("includes"),n.includes)return n.mixin=e.consume("identifier")||e.error("Incomplete includes statement"),n.termination=e.consume(";")||e.error("No terminating ; for includes statement"),new D({source:e.source,tokens:n});e.unconsume(t.index)}get type(){return"includes"}get target(){return $(this.tokens.target.value)}get includes(){return $(this.tokens.mixin.value)}}class W extends d{static parse(e){const t={},n=new W({source:e.source,tokens:t});if(t.base=e.consume("typedef"),t.base)return n.idlType=P(e,"typedef-type")||e.error("Typedef lacks a type"),t.name=e.consume("identifier")||e.error("Typedef lacks a name"),e.current=n,t.termination=e.consume(";")||e.error("Unterminated typedef, expected `;`"),n}get type(){return"typedef"}get name(){return $(this.tokens.name.value)}}class _ extends d{static parse(e,t){const n={base:t},r=new _({source:e.source,tokens:n});return n.name=e.consume("identifier")||e.error("Callback lacks a name"),e.current=r,n.assign=e.consume("=")||e.error("Callback lacks an assignment"),r.idlType=E(e)||e.error("Callback lacks a return type"),n.open=e.consume("(")||e.error("Callback lacks parentheses for arguments"),r.arguments=O(e),n.close=e.consume(")")||e.error("Unterminated callback"),n.termination=e.consume(";")||e.error("Unterminated callback, expected `;`"),r}get type(){return"callback"}get name(){return $(this.tokens.name.value)}}class F extends d{static parse(e,t,{type:n,inheritable:r,allowedMembers:s}){const{tokens:o}=t;for(o.name=e.consume("identifier")||e.error(`Missing name in ${t.type}`),e.current=t,r&&Object.assign(o,function(e){const t=e.consume(":");return t?{colon:t,inheritance:e.consume("identifier")||e.error("Inheritance lacks a type")}:{}}(e)),o.open=e.consume("{")||e.error(`Bodyless ${n}`),t.members=[];;){if(o.close=e.consume("}"),o.close)return o.termination=e.consume(";")||e.error(`Missing semicolon after ${n}`),t;const r=x.parse(e);let a;for(const[t,...n]of s)if(a=t(e,...n))break;a||e.error("Unknown member"),a.extAttrs=r,t.members.push(a)}}get partial(){return!!this.tokens.partial}get name(){return $(this.tokens.name.value)}get inheritance(){return this.tokens.inheritance?$(this.tokens.inheritance.value):null}*validate(e){for(const t of this.members)t.validate&&(yield*t.validate(e))}}class C extends d{static parse(e){const t={};if(t.base=e.consume("const"),!t.base)return;let n=I(e);if(!n){const t=e.consume("identifier")||e.error("Const lacks a type");n=new y({source:e.source,tokens:{base:t}})}e.probe("?")&&e.error("Unexpected nullable constant type"),n.type="const-type",t.name=e.consume("identifier")||e.error("Const lacks a name"),t.assign=e.consume("=")||e.error("Const lacks value assignment"),t.value=N(e)||e.error("Const lacks a value"),t.termination=e.consume(";")||e.error("Unterminated const, expected `;`");const r=new 
C({source:e.source,tokens:t});return r.idlType=n,r}get type(){return"const"}get name(){return unescape(this.tokens.name.value)}get value(){return j(this.tokens.value)}}class R extends d{static parse(e){const t=e.position,n={},r=new R({source:e.source,tokens:n});if(n.readonly=e.consume("readonly"),n.base=n.readonly?e.consume("maplike","setlike"):e.consume("iterable","maplike","setlike"),!n.base)return void e.unconsume(t);const{type:s}=r,o="maplike"===s,a=o||"iterable"===s;n.open=e.consume("<")||e.error(`Missing less-than sign \`<\` in ${s} declaration`);const i=P(e)||e.error(`Missing a type argument in ${s} declaration`);return r.idlType=[i],a&&(i.tokens.separator=e.consume(","),i.tokens.separator?r.idlType.push(P(e)):o&&e.error(`Missing second type argument in ${s} declaration`)),n.close=e.consume(">")||e.error(`Missing greater-than sign \`>\` in ${s} declaration`),n.termination=e.consume(";")||e.error(`Missing semicolon after ${s} declaration`),r}get type(){return this.tokens.base.value}get readonly(){return!!this.tokens.readonly}}function z(e){const t=e.consume("static");if(t)return A.parse(e,{special:t})||T.parse(e,{special:t})||e.error("No body in static member")}class B extends F{static parse(e,t,{partial:n=null}={}){const r={partial:n,base:t};return F.parse(e,new B({source:e.source,tokens:r}),{type:"interface",inheritable:!n,allowedMembers:[[C.parse],[z],[q],[R.parse],[A.parse],[T.parse]]})}get type(){return"interface"}*validate(e){if(!this.partial&&this.extAttrs.every(e=>"Exposed"!==e.name)){const e="Interfaces must have `[Exposed]` extended attribute. To fix, add, for example, `[Exposed=Window]`. Please also consider carefully if your interface should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield s(this.source,this.tokens.name,this,e)}yield*super.validate(e),this.partial||(yield*function*(e,t){const n=new Set(i(t).map(e=>e.name)),r=e.partials.get(t.name)||[],o=e.mixinMap.get(t.name)||[];for(const e of[...r,...o]){const r=i(e);yield*a(r,n,e,t);for(const e of r)n.add(e.name)}function*a(e,t,n,r){for(const o of e){const{name:e}=o;if(e&&t.has(e)){const t=`The operation "${e}" has already been defined for the base interface "${r.name}" either in itself or in a mixin`;yield s(n.source,o.tokens.name,n,t)}}}function i(e){return e.members.filter(({type:e})=>"operation"===e)}}(e,this))}}class L extends F{static parse(e,t,{partial:n}={}){const r={partial:n,base:t};if(r.mixin=e.consume("mixin"),r.mixin)return F.parse(e,new L({source:e.source,tokens:r}),{type:"interface mixin",allowedMembers:[[C.parse],[q],[A.parse,{noInherit:!0}],[T.parse,{regular:!0}]]})}get type(){return"interface mixin"}}class Z extends d{static parse(e){const t={},n=new Z({source:e.source,tokens:t});return n.extAttrs=x.parse(e),t.required=e.consume("required"),n.idlType=P(e,"dictionary-type")||e.error("Dictionary member lacks a type"),t.name=e.consume("identifier")||e.error("Dictionary member lacks a name"),n.default=k.parse(e),t.required&&n.default&&e.error("Required member must not have a default"),t.termination=e.consume(";")||e.error("Unterminated dictionary member, expected `;`"),n}get type(){return"field"}get name(){return $(this.tokens.name.value)}get required(){return!!this.tokens.required}}class V extends F{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("dictionary"),n.base)return F.parse(e,new 
V({source:e.source,tokens:n}),{type:"dictionary",inheritable:!t,allowedMembers:[[Z.parse]]})}get type(){return"dictionary"}}class J extends F{static parse(e,{partial:t}={}){const n={partial:t};if(n.base=e.consume("namespace"),n.base)return F.parse(e,new J({source:e.source,tokens:n}),{type:"namespace",allowedMembers:[[A.parse,{noInherit:!0,readonly:!0}],[T.parse,{regular:!0}]]})}get type(){return"namespace"}*validate(e){if(!this.partial&&this.extAttrs.every(e=>"Exposed"!==e.name)){const e="Namespaces must have [Exposed] extended attribute. To fix, add, for example, [Exposed=Window]. Please also consider carefully if your namespace should also be exposed in a Worker scope. Refer to the [WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) for more information.";yield s(this.source,this.tokens.name,this,e)}yield*super.validate(e)}}class X extends F{static parse(e,t,{partial:n=null}={}){const r={callback:t};if(r.base=e.consume("interface"),r.base)return F.parse(e,new X({source:e.source,tokens:r}),{type:"callback interface",inheritable:!n,allowedMembers:[[C.parse],[T.parse,{regular:!0}]]})}get type(){return"callback interface"}}function G(e,t){const n=e.source;function r(t){e.error(t)}function s(...t){return e.consume(...t)}function o(t){const n=s("interface");if(n)return L.parse(e,n,t)||B.parse(e,n,t)||r("Interface has no proper body")}function a(){return function(){const t=s("callback");if(t)return e.probe("interface")?X.parse(e,t):_.parse(e,t)}()||o()||function(){const t=s("partial");if(t)return V.parse(e,{partial:t})||o({partial:t})||J.parse(e,{partial:t})||r("Partial doesn't apply to anything")}()||V.parse(e)||S.parse(e)||W.parse(e)||D.parse(e)||J.parse(e)}const i=function(){if(!n.length)return[];const o=[];for(;;){const t=x.parse(e),n=a();if(!n){t.length&&r("Stray extended attributes");break}n.extAttrs=t,o.push(n)}const i=s("eof");return t.concrete&&o.push(i),o}();return e.positione.join(""),trivia:K,name:K,reference:K,type:K,generic:K,inheritance:K,definition:K,extendedAttribute:K,extendedAttributeReference:K};function Y(e,{templates:t=Q}={}){function n(e,{unescaped:n,context:r}){return n||(n=e.startsWith("_")?e.slice(1):e),t.reference(e,n,r)}function r(e,n=K,...r){if(!e)return"";const s=n(e.value,...r);return t.wrap([t.trivia(e.trivia),s])}function s(e,t){return r(e,n,{context:t})}function o(e,n){return r(e,t.name,n)}function a(e){if(e.union||e.generic)return t.wrap([r(e.tokens.base,t.generic),r(e.tokens.open),...e.subtype.map(i),r(e.tokens.close)]);const s=e.tokens.prefix||e.tokens.base,o=e.tokens.prefix?[e.tokens.prefix.value,t.trivia(e.tokens.base.trivia)]:[],a=n(t.wrap([...o,e.tokens.base.value,r(e.tokens.postfix)]),{unescaped:e.idlType,context:e});return t.wrap([t.trivia(s.trivia),a])}function i(e){return t.wrap([p(e.extAttrs),a(e),r(e.tokens.nullable),r(e.tokens.separator)])}function c(e){return e?t.wrap([r(e.tokens.assign),...e.expression.map(e=>r(e))]):""}function u(e){return t.wrap([p(e.extAttrs),r(e.tokens.optional),t.type(i(e.idlType)),r(e.tokens.variadic),o(e.tokens.name,{data:e}),c(e.default),r(e.tokens.separator)])}function l(e){const{rhsType:n}=e.params;return t.wrap([t.trivia(e.tokens.name.trivia),t.extendedAttribute(t.wrap([t.extendedAttributeReference(e.name),r(e.params.tokens.assign),s(e.params.tokens.secondaryName,e),r(e.params.tokens.open),...e.params.list?e.params.list.map("identifier-list"===n?n=>(function(e,n){return t.wrap([s(e.tokens.value,n),r(e.tokens.separator)])})(n,e):u):[],r(e.params.tokens.close)])),r(e.tokens.separator)])}function 
p(e){return e.length?t.wrap([r(e.tokens.open),...e.map(l),r(e.tokens.close)]):""}function d(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.callback),r(e.tokens.partial),r(e.tokens.base),r(e.tokens.mixin),o(e.tokens.name,{data:e}),(s=e,s.tokens.inheritance?t.wrap([r(s.tokens.colon),t.trivia(s.tokens.inheritance.trivia),t.inheritance(n(s.tokens.inheritance.value,{context:s}))]):""),r(e.tokens.open),y(e.members,e),r(e.tokens.close),r(e.tokens.termination)]),{data:e});var s}function m(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.readonly),r(e.tokens.base,t.generic),r(e.tokens.open),t.wrap(e.idlType.map(i)),r(e.tokens.close),r(e.tokens.termination)]),{data:e,parent:n})}t=Object.assign({},Q,t);const f={interface:d,"interface mixin":d,namespace:d,operation:function(e,n){const s=e.idlType?[t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.open),t.wrap(e.arguments.map(u)),r(e.tokens.close)]:[];return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.special),...s,r(e.tokens.termination)]),{data:e,parent:n})},attribute:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.special),r(e.tokens.readonly),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.termination)]),{data:e,parent:n})},dictionary:d,field:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.required),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),c(e.default),r(e.tokens.termination)]),{data:e,parent:n})},const:function(e,n){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e,parent:n}),r(e.tokens.assign),r(e.tokens.value),r(e.tokens.termination)]),{data:e,parent:n})},typedef:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),t.type(i(e.idlType)),o(e.tokens.name,{data:e}),r(e.tokens.termination)]),{data:e})},includes:function(e){return t.definition(t.wrap([p(e.extAttrs),s(e.tokens.target,e),r(e.tokens.includes),s(e.tokens.mixin,e),r(e.tokens.termination)]),{data:e})},callback:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),o(e.tokens.name,{data:e}),r(e.tokens.assign),t.type(i(e.idlType)),r(e.tokens.open),...e.arguments.map(u),r(e.tokens.close),r(e.tokens.termination)]),{data:e})},enum:function(e){return t.definition(t.wrap([p(e.extAttrs),r(e.tokens.base),o(e.tokens.name,{data:e}),r(e.tokens.open),y(e.values,e),r(e.tokens.close),r(e.tokens.termination)]),{data:e})},"enum-value":function(e,n){return t.wrap([t.trivia(e.tokens.value.trivia),t.definition(t.wrap(['"',t.name(e.value,{data:e,parent:n}),'"']),{data:e,parent:n}),r(e.tokens.separator)])},iterable:m,legacyiterable:m,maplike:m,setlike:m,"callback interface":d,eof:function(e){return t.trivia(e.trivia)}};function y(e,n){if(!e)return;const r=e.map(e=>(function(e,t){if(!f[e.type])throw new Error(`Type "${e.type}" is unsupported`);return f[e.type](e,t)})(e,n));return t.wrap(r)}return y(e)}function ee(e,t){const n=new Map,r=e.filter(e=>"includes"===e.type);for(const e of r){const r=t.get(e.includes);if(!r)continue;const s=n.get(e.target);s?s.push(r):n.set(e.target,[r])}return n}function*te(e){const t=function(e){const t=new Map,n=new Set,r=new Map;for(const s of e)if(s.partial){const e=r.get(s.name);e?e.push(s):r.set(s.name,[s])}else s.name&&(t.has(s.name)?n.add(s):t.set(s.name,s));return{all:e,unique:t,partials:r,duplicates:n,mixinMap:ee(e,t)}}(e);for(const e of t.all)e.validate&&(yield*e.validate(t));yield*function*({unique:e,duplicates:t}){for(const n of t){const{name:t}=n,r=`The name 
"${t}" of type "${e.get(t).type}" was already seen`;yield s(n.source,n.tokens.name,n,r)}}(t)}function ne(e){return[...te((t=e,t.flat?t.flat():[].concat(...t)))];var t}n.d(t,"parse",function(){return H}),n.d(t,"write",function(){return Y}),n.d(t,"validate",function(){return ne})}])}); //# sourceMappingURL=webidl2.js.map \ No newline at end of file diff --git a/dist/webidl2.js.map b/dist/webidl2.js.map index f54e0065..98715b58 100644 --- a/dist/webidl2.js.map +++ b/dist/webidl2.js.map @@ -1 +1 @@ -{"version":3,"sources":["webpack://WebIDL2/webpack/universalModuleDefinition","webpack://WebIDL2/webpack/bootstrap","webpack://WebIDL2/./lib/error.js","webpack://WebIDL2/./lib/tokeniser.js","webpack://WebIDL2/./lib/productions/base.js","webpack://WebIDL2/./lib/productions/type.js","webpack://WebIDL2/./lib/productions/default.js","webpack://WebIDL2/./lib/productions/array-base.js","webpack://WebIDL2/./lib/productions/extended-attributes.js","webpack://WebIDL2/./lib/productions/helpers.js","webpack://WebIDL2/./lib/productions/argument.js","webpack://WebIDL2/./lib/productions/token.js","webpack://WebIDL2/./lib/validators/helpers.js","webpack://WebIDL2/./lib/productions/operation.js","webpack://WebIDL2/./lib/productions/attribute.js","webpack://WebIDL2/./lib/productions/enum.js","webpack://WebIDL2/./lib/productions/includes.js","webpack://WebIDL2/./lib/productions/typedef.js","webpack://WebIDL2/./lib/productions/callback.js","webpack://WebIDL2/./lib/productions/container.js","webpack://WebIDL2/./lib/productions/constant.js","webpack://WebIDL2/./lib/productions/iterable.js","webpack://WebIDL2/./lib/productions/interface.js","webpack://WebIDL2/./lib/validators/interface.js","webpack://WebIDL2/./lib/productions/mixin.js","webpack://WebIDL2/./lib/productions/field.js","webpack://WebIDL2/./lib/productions/dictionary.js","webpack://WebIDL2/./lib/productions/namespace.js","webpack://WebIDL2/./lib/productions/callback-interface.js","webpack://WebIDL2/./lib/webidl2.js","webpack://WebIDL2/./lib/writer.js","webpack://WebIDL2/./lib/validator.js","webpack://WebIDL2/./index.js"],"names":["root","factory","exports","module","define","amd","this","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","error_error","source","position","current","message","type","sliceTokens","count","slice","Math","max","tokensToText","inputs","precedes","text","map","trivia","join","nextToken","length","line","precedingLastLine","splitted","split","lastLine","subsequentTokens","subsequentText","sourceContext","repeat","contextType","context","partial","bareMessage","sourceName","input","tokens","validationError","token","index","tokenRe","decimal","integer","identifier","string","whitespace","comment","other","stringTypes","argumentNameKeywords","nonRegexTerminals","concat","punctuations","tokeniser_Tokeniser","[object 
Object]","idl","str","lastCharIndex","nextChar","charAt","result","test","attemptTokenMatch","noFlushTrivia","currentTrivia","pop","match","includes","punctuation","startsWith","push","Error","re","lastIndex","exec","tokenise","WebIDLParseError","syntaxError","candidates","probe","super","Base","defineProperties","json","undefined","inheritance","proto","descMap","getOwnPropertyDescriptors","entries","getPrototypeOf","type_suffix","tokeniser","obj","nullable","consume","error","single_type","typeName","ret","base","type_Type","open","subtype","return_type","type_with_extended_attributes","keyType","keyIdlType","separator","valueType","idlType","close","generic_type","primitive_type","generic","typ","or","union_type","extAttrs","Boolean","union","helpers_unescape","prefix","postfix","filter","default_Default","assign","def","const_value","expression","const_data","negative","ArrayBase","Array","extended_attributes_ExtendedAttributeParameters","secondaryName","list","rhsType","ids","parser","token_Token","listName","identifiers","argument_list","hasRhs","extended_attributes_SimpleExtendedAttribute","params","parse","rhs","arguments","extended_attributes_ExtendedAttributes","argument_Argument","start_position","optional","variadic","default","unconsume","idlTypeIncludesDictionary","defs","unique","operation_Operation","special","regular","termination","argument","attribute_Attribute","noInherit","readonly","allowDangler","first","items","item","num_type","integer_type","decimal_type","voidToken","stringifier","enum_EnumValue","enum_Enum","values","includes_Includes","target","mixin","typedef_Typedef","callback_CallbackFunction","container_Container","instance","inheritable","allowedMembers","colon","members","ea","mem","args","member","validate","constant_Constant","unescape","iterable_IterableLike","secondTypeRequired","secondTypeAllowed","static_member","interface_Interface","every","extAttr","opNames","Set","getOperations","op","partials","mixins","mixinMap","ext","additions","forEachExtension","addition","add","existings","has","checkInterfaceMemberDuplication","mixin_Mixin","field_Field","required","dictionary_Dictionary","namespace_Namespace","callback_interface_CallbackInterface","callback","parseByTokens","options","interface_","opts","definition","res","eof","concrete","definitions","noop","arg","templates","wrap","reference","extendedAttribute","extendedAttributeReference","write","ast","ts","raw","unescaped","wrapper","reference_token","name_token","type_body","it","firstToken","ref","extended_attributes","default_","data","make_ext_at","id","eats","container","inh","iterate","iterable_like","parent","table","interface","interface mixin","namespace","operation","body","attribute","dictionary","field","const","typedef","enum","enum-value","v","iterable","legacyiterable","maplike","setlike","callback 
interface","things","results","thing","dispatch","getMixinMap","all","Map","include","array","set","validateIterable","duplicates","groupDefinitions","dup","checkDuplicatedNames","flat","__webpack_exports__"],"mappings":"CAAA,SAAAA,EAAAC,GACA,iBAAAC,SAAA,iBAAAC,OACAA,OAAAD,QAAAD,IACA,mBAAAG,eAAAC,IACAD,OAAA,GAAAH,GACA,iBAAAC,QACAA,QAAA,QAAAD,IAEAD,EAAA,QAAAC,IARA,CASCK,KAAA,WACD,mBCTA,IAAAC,EAAA,GAGA,SAAAC,EAAAC,GAGA,GAAAF,EAAAE,GACA,OAAAF,EAAAE,GAAAP,QAGA,IAAAC,EAAAI,EAAAE,GAAA,CACAC,EAAAD,EACAE,GAAA,EACAT,QAAA,IAUA,OANAU,EAAAH,GAAAI,KAAAV,EAAAD,QAAAC,IAAAD,QAAAM,GAGAL,EAAAQ,GAAA,EAGAR,EAAAD,QA0DA,OArDAM,EAAAM,EAAAF,EAGAJ,EAAAO,EAAAR,EAGAC,EAAAQ,EAAA,SAAAd,EAAAe,EAAAC,GACAV,EAAAW,EAAAjB,EAAAe,IACAG,OAAAC,eAAAnB,EAAAe,EAAA,CAA0CK,YAAA,EAAAC,IAAAL,KAK1CV,EAAAgB,EAAA,SAAAtB,GACA,oBAAAuB,eAAAC,aACAN,OAAAC,eAAAnB,EAAAuB,OAAAC,YAAA,CAAwDC,MAAA,WAExDP,OAAAC,eAAAnB,EAAA,cAAiDyB,OAAA,KAQjDnB,EAAAoB,EAAA,SAAAD,EAAAE,GAEA,GADA,EAAAA,IAAAF,EAAAnB,EAAAmB,IACA,EAAAE,EAAA,OAAAF,EACA,KAAAE,GAAA,iBAAAF,QAAAG,WAAA,OAAAH,EACA,IAAAI,EAAAX,OAAAY,OAAA,MAGA,GAFAxB,EAAAgB,EAAAO,GACAX,OAAAC,eAAAU,EAAA,WAAyCT,YAAA,EAAAK,UACzC,EAAAE,GAAA,iBAAAF,EAAA,QAAAM,KAAAN,EAAAnB,EAAAQ,EAAAe,EAAAE,EAAA,SAAAA,GAAgH,OAAAN,EAAAM,IAAqBC,KAAA,KAAAD,IACrI,OAAAF,GAIAvB,EAAA2B,EAAA,SAAAhC,GACA,IAAAe,EAAAf,KAAA2B,WACA,WAA2B,OAAA3B,EAAA,SAC3B,WAAiC,OAAAA,GAEjC,OADAK,EAAAQ,EAAAE,EAAA,IAAAA,GACAA,GAIAV,EAAAW,EAAA,SAAAiB,EAAAC,GAAsD,OAAAjB,OAAAkB,UAAAC,eAAA1B,KAAAuB,EAAAC,IAGtD7B,EAAAgC,EAAA,GAIAhC,IAAAiC,EAAA,kCCtEA,SAASC,EAAKC,EAAAC,EAAAC,EAAAC,EAAAC,GAId,SAAAC,EAAAC,GACA,OAAAA,EAAA,EACAN,EAAAO,MAAAN,IAAAK,GACAN,EAAAO,MAAAC,KAAAC,IAAAR,EAAAK,EAAA,GAAAL,GAGA,SAAAS,EAAAC,GAAAC,SAAiCA,GAAW,IAC5C,MAAAC,EAAAF,EAAAG,IAAA7B,KAAA8B,OAAA9B,EAAAD,OAAAgC,KAAA,IACAC,EAAAjB,EAAAC,GACA,cAAAgB,EAAAb,KACAS,EAEAD,EACAC,EAAAI,EAAAF,OAEAF,EAAAN,MAAAU,EAAAF,OAAAG,QAGA,MACAC,EACA,QAAAnB,EAAAC,GAAAG,KAAAJ,EAAAC,GAAAkB,KACAnB,EAAAkB,OAAA,EAAAlB,EAAAC,EAAA,GAAAkB,KACA,EAEAC,EArCA,SAAAP,GACA,MAAAQ,EAAAR,EAAAS,MAAA,MACA,OAAAD,IAAAH,OAAA,GAmCAK,CACAb,EAAAL,GAPA,GAOA,CAA2CO,UAAA,KAG3CY,EAAAnB,EAVA,GAWAoB,EAAAf,EAAAc,GAIAE,EAAAN,EAHAK,EAAAH,MAAA,SAGA,MADA,IAAAK,OAAAP,EAAAF,QAAA,KAGAU,EAAA,WAAAxB,EAAA,iBAGAyB,KAAqBzB,mBAAsBe,IAF3CnB,EAAA1B,YAA4C0B,EAAA1B,OAAY,KACxD4B,OAA4C0B,OAAiB1B,EAAA4B,QAAA,gBAAoC5B,EAAAE,QAAgBF,EAAA5B,SAAa,QACrCoD,IACzF,OACAvB,WAAgB0B,KAAW1B,IAC3B4B,YAAA5B,EACA0B,UACAV,OACAa,WAAAhC,EAAA1B,KACA2D,MAAAR,EACAS,OAAAV,GAcO,SAAAW,EAAAnC,EAAAoC,EAAAlC,EAAAC,GACP,OAASJ,EAAKC,EAAAoC,EAAAC,MAAAnC,EAAAC,EAAA,qBCzEd,MAAAmC,EAAA,CAGAC,QAAA,sGACAC,QAAA,8CACAC,WAAA,+BACAC,OAAA,WACAC,WAAA,cACAC,QAAA,iDACAC,MAAA,wBAGOC,EAAA,CACP,aACA,YACA,aAGOC,EAAA,CACP,YACA,WACA,QACA,UACA,aACA,OACA,SACA,WACA,UACA,YACA,WACA,UACA,YACA,UACA,WACA,UACA,SACA,SACA,cACA,UACA,gBAGAC,EAAA,CACA,YACA,cACA,WACA,MACA,UACA,UACA,OACA,SACA,QACA,QACA,aACA,iBACA,OACA,QACA,OACA,QACA,WACA,KACA,WACA,SACA,WACA,QACA,OACA,WACA,QACAC,OAAAF,EAAAD,GAEAI,EAAA,CACA,IACA,IACA,IACA,MACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,IACA,KA6FO,MAAMC,EAIbC,YAAAC,GACA1F,KAAAqC,OA5FA,SAAAsD,GACA,MAAApB,EAAA,GACA,IAAAqB,EAAA,EACAxC,EAAA,GACAI,EAAA,EACAkB,EAAA,EACA,KAAAkB,EAAAD,EAAApC,QAAA,CACA,MAAAsC,EAAAF,EAAAG,OAAAF,GACA,IAAAG,GAAA,EAQA,GANA,YAAAC,KAAAH,GACAE,EAAAE,EAAA,cAAgDC,eAAA,IAC3C,MAAAL,IACLE,EAAAE,EAAA,WAA6CC,eAAA,MAG7C,IAAAH,EAAA,CACA,MAAAI,EAAA5B,EAAA6B,MAAA/E,MACAmC,IAAA2C,EAAAE,MAAA,YAAA9C,OACAH,GAAA+C,EACAzB,GAAA,OACK,oBAAAsB,KAAAH,IAKL,IAHA,KADAE,EAAAE,EAAA,cAEAF,EAAAE,EAAA,aAEA,IAAAF,EAAA,CACAA,EAAAE,EAAA,cACA,MAAAxB,EAAAF,IAAAhB,OAAA,IACA,IAAAwC,GAAAV,EAAAiB,SAAA7B,EAAApD,SACAoD,EAAAhC,KAAAgC,EAAApD,YA
GK,MAAAwE,IACLE,EAAAE,EAAA,WAGA,UAAAM,KAAAhB,EACA,GAAAI,EAAAa,WAAAD,EAAAX,GAAA,CACArB,EAAAkC,KAAA,CAAqBhE,KAAA8D,EAAAlF,MAAAkF,EAAAnD,SAAAI,OAAAkB,UACrBtB,EAAA,GAEA2C,EADAH,GAAAW,EAAAhD,OAEA,MAQA,IAHA,IAAAwC,IACAA,EAAAE,EAAA,WAEA,IAAAF,EACA,UAAAW,MAAA,gCAEAd,EAAAG,EACArB,GAAA,EAUA,OANAH,EAAAkC,KAAA,CACAhE,KAAA,MACApB,MAAA,GACA+B,WAGAmB,EAOA,SAAA0B,EAAAxD,GAAAyD,cAAoCA,GAAgB,IACpD,MAAAS,EAAAhC,EAAAlC,GACAkE,EAAAC,UAAAhB,EACA,MAAAG,EAAAY,EAAAE,KAAAlB,GACA,OAAAI,GACAxB,EAAAkC,KAAA,CAAmBhE,OAAApB,MAAA0E,EAAA,GAAA3C,SAAAI,OAAAkB,UACnBwB,IACA9C,EAAA,IAEAuD,EAAAC,YAEA,GASAE,CAAApB,GACA1F,KAAAsC,SAAA,EAMAmD,MAAAjD,GACA,UAAAuE,ED7HO,SAAA1E,EAAAC,EAAAC,EAAAC,GACP,OAASJ,EAAKC,EAAAC,EAAAC,EAAAC,EAAA,UC4HiBwE,CAAWhH,KAAAqC,OAAArC,KAAAsC,SAAAtC,KAAAuC,QAAAC,IAM1CiD,MAAAhD,GACA,OAAAzC,KAAAqC,OAAAkB,OAAAvD,KAAAsC,UAAAtC,KAAAqC,OAAArC,KAAAsC,UAAAG,SAMAgD,WAAAwB,GACA,UAAAxE,KAAAwE,EAAA,CACA,IAAAjH,KAAAkH,MAAAzE,GAAA,SACA,MAAAgC,EAAAzE,KAAAqC,OAAArC,KAAAsC,UAEA,OADAtC,KAAAsC,WACAmC,GAOAgB,UAAAnD,GACAtC,KAAAsC,YAIA,MAAAyE,UAAAL,MACAjB,aAAAjD,QAAeA,EAAA4B,cAAAF,UAAAV,OAAAa,aAAAC,QAAAC,WACf4C,MAAA3E,GAEAxC,KAAAW,KAAA,mBACAX,KAAAoE,cACApE,KAAAkE,UACAlE,KAAAwD,OACAxD,KAAAqE,aACArE,KAAAsE,QACAtE,KAAAuE,UC1OO,MAAA6C,EACP3B,aAAApD,OAAeA,EAAAkC,WACfzD,OAAAuG,iBAAArH,KAAA,CACAqC,OAAA,CAAehB,MAAAgB,GACfkC,OAAA,CAAelD,MAAAkD,KAIfkB,SACA,MAAA6B,EAAA,CAAkB7E,UAAA8E,EAAA5G,UAAA4G,EAAAC,iBAAAD,GAClB,IAAAE,EAAAzH,KACA,KAAAyH,IAAA3G,OAAAkB,WAAA,CACA,MAAA0F,EAAA5G,OAAA6G,0BAAAF,GACA,UAAA9F,EAAAN,KAAAP,OAAA8G,QAAAF,IACArG,EAAAL,YAAAK,EAAAJ,OACAqG,EAAA3F,GAAA3B,KAAA2B,IAGA8F,EAAA3G,OAAA+G,eAAAJ,GAEA,OAAAH,GC2BA,SAAAQ,EAAAC,EAAAC,GACA,MAAAC,EAAAF,EAAAG,QAAA,KACAD,IACAD,EAAAzD,OAAA0D,YAEAF,EAAAb,MAAA,MAAAa,EAAAI,MAAA,iCAOA,SAAAC,EAAAL,EAAAM,GACA,IAAAC,EApDA,SAAAP,EAAAM,GACA,MAAAE,EAAAR,EAAAG,QAAA,6CACA,IAAAK,EACA,OAEA,MAAAD,EAAA,IAAkBE,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoCgE,UAE5D,OADAD,EAAA/D,OAAAkE,KAAAV,EAAAG,QAAA,MAAAH,EAAAI,kCAA0FI,EAAA9F,QAC1F8F,EAAA9F,MACA,eACAsF,EAAAb,MAAA,MAAAa,EAAAI,MAAA,+CACA,MAAAO,EAAsBC,EAAWZ,EAAAM,IAAAN,EAAAI,MAAA,2BACjCG,EAAAI,QAAAjC,KAAAiC,GACA,MAEA,eACA,mBACA,MAAAA,EAAsBE,EAA6Bb,EAAAM,IAAAN,EAAAI,iBAAoDI,EAAA9F,gBACvG6F,EAAAI,QAAAjC,KAAAiC,GACA,MAEA,cACAX,EAAAb,MAAA,MAAAa,EAAAI,MAAA,6CACA,MAAAU,EAAAd,EAAAG,WAA2C/C,IAAW4C,EAAAI,oCAAmDhD,EAAW9B,KAAA,SACpHyF,EAAA,IAA6BN,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoCgE,KAAAM,KACvEC,EAAAvE,OAAAwE,UAAAhB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,uCACAW,EAAArG,KAAA4F,EACA,MAAAW,EAAwBJ,EAA6Bb,EAAAM,IAAAN,EAAAI,MAAA,qCACrDG,EAAAI,QAAAjC,KAAAqC,EAAAE,GACA,OAKA,OAFAV,EAAAW,SAAAlB,EAAAI,oCAAkEI,EAAA9F,QAClE6F,EAAA/D,OAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,uCAAgGI,EAAA9F,QAChG6F,EAmBAa,CAAApB,EAAAM,IAAiDe,EAAcrB,GAC/D,IAAAO,EAAA,CACA,MAAAC,EAAAR,EAAAG,QAAA,gBAAoD/C,GACpD,IAAAoD,EACA,OAEAD,EAAA,IAAcE,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoCgE,UACxDR,EAAAb,MAAA,MAAAa,EAAAI,kCAA0EI,EAAAlH,SAQ1E,MANA,YAAAiH,EAAAe,SAAAtB,EAAAb,MAAA,MACAa,EAAAI,MAAA,mCAEAG,EAAA7F,KAAA4F,GAAA,KACAP,EAAAC,EAAAO,GACAA,EAAAL,UAAA,QAAAK,EAAAW,SAAAlB,EAAAI,MAAA,sCACAG,EA+BO,MAAME,UAAapB,EAK1B3B,aAAAsC,EAAAM,GACA,OAAAD,EAAAL,EAAAM,IA9BA,SAAAN,EAAAtF,GACA,MAAA8B,EAAA,GAEA,GADAA,EAAAkE,KAAAV,EAAAG,QAAA,MACA3D,EAAAkE,KAAA,OACA,MAAAH,EAAA,IAAkBE,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,WAExB,IADA+D,EAAA7F,QAAA,OACA,CACA,MAAA6G,EAAgBV,EAA6Bb,MAAAI,MAAA,wDAC7C,QAAAmB,EAAAL,SAAAlB,EAAAI,MAAA,iDACAG,EAAAI,QAAAjC,KAAA6C,GACA,MAAAC,EAAAxB,EAAAG,QAAA,MACA,IAAAqB,EAGA,MAFAD,EAAA/E,OAAAwE,UAAAQ,EASA,OALAjB,EAAAW,QAAA1F,OAAA,GACAwE,EAAAI,MAAA,kEAEA5D,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,2BACAL,EAAAC,EAAAO,GACAA,EASAkB
,CAAAzB,EAAAM,GAGA5C,aAAApD,OAAeA,EAAAkC,WACf4C,MAAA,CAAW9E,SAAAkC,WACXzD,OAAAC,eAAAf,KAAA,WAA4CqB,MAAA,KAC5CrB,KAAAyJ,SAAA,GAGAJ,cACA,OAAArJ,KAAA0I,QAAAnF,QAAAvD,KAAAuE,OAAAgE,KACAvI,KAAAuE,OAAAgE,KAAAlH,MAEA,GAEA4G,eACA,OAAAyB,QAAA1J,KAAAuE,OAAA0D,UAEA0B,YACA,OAAAD,QAAA1J,KAAA0I,QAAAnF,UAAAvD,KAAAuE,OAAAgE,KAEAU,cACA,GAAAjJ,KAAA0I,QAAAnF,OACA,OAAAvD,KAAA0I,QAQA,OAAWkB,EALX,CACA5J,KAAAuE,OAAAsF,OACA7J,KAAAuE,OAAAgE,KACAvI,KAAAuE,OAAAuF,SACAC,OAAAzI,MAAA6B,IAAA7B,KAAAD,OAAAgC,KAAA,OC3IO,MAAM2G,UAAgB5C,EAI7B3B,aAAAsC,GACA,MAAAkC,EAAAlC,EAAAG,QAAA,KACA,IAAA+B,EACA,YAEA,MAAAC,EAAgBC,EAAWpC,MAAAG,QAAA,0BAA0DH,EAAAI,MAAA,wBACrFiC,EAAA,CAAAF,GACA,SAAAA,EAAAzH,KAAA,CACA,MAAAyG,EAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,wCACAiC,EAAA3D,KAAAyC,QACK,SAAAgB,EAAAzH,KAAyB,CAC9B,MAAAyG,EAAAnB,EAAAG,QAAA,MAAwCH,EAAAI,MAAA,0CACxCiC,EAAA3D,KAAAyC,GAEA,WAAec,EAAO,CAAE3H,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoC0F,UAASG,eAGrE3E,aAAApD,OAAeA,EAAAkC,SAAA6F,eACfjD,MAAA,CAAW9E,SAAAkC,WACXzD,OAAAC,eAAAf,KAAA,cAA+CqB,MAAA+I,IAG/C3H,WACA,OAAW4H,EAAUrK,KAAAoK,WAAA,IAAA3H,KAErBpB,YACA,OAAWgJ,EAAUrK,KAAAoK,WAAA,IAAA/I,MAErBiJ,eACA,OAAWD,EAAUrK,KAAAoK,WAAA,IAAAE,UCpCd,MAAAC,UAAAC,MACP/E,aAAApD,OAAeA,EAAAkC,WACf4C,QACArG,OAAAuG,iBAAArH,KAAA,CACAqC,OAAA,CAAehB,MAAAgB,GACfkC,OAAA,CAAelD,MAAAkD,MCDf,MAAMkG,UAAoCrD,EAI1C3B,aAAAsC,GACA,MAAAxD,EAAA,CAAoB0F,OAAAlC,EAAAG,QAAA,MACpBI,EAAA,IAAoBmC,EAA2B,CAAEpI,OAAA0F,EAAA1F,OAAAkC,WAejD,OAdAA,EAAA0F,SACA1F,EAAAmG,cAAA3C,EAAAG,QAAA,4CAEA3D,EAAAkE,KAAAV,EAAAG,QAAA,KACA3D,EAAAkE,MACAH,EAAAqC,KAAA,oBAAArC,EAAAsC,QCiGO,SAAA7C,GACP,MAAA8C,EAAAF,EAAA5C,EAAA,CAA+B+C,OAASC,EAAKD,OAAA/C,EAAA,cAAAiD,SAAA,oBAC7CH,EAAAtH,QACAwE,EAAAI,MAAA,uCAEA,OAAA0C,EDpGQI,CAAWlD,GAEXmD,EAAanD,GACrBxD,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,yDACKG,EAAA6C,SAAA5G,EAAAmG,eACL3C,EAAAI,MAAA,uDAEAG,EAGAsC,cACA,OAAA5K,KAAAuE,OAAA0F,OACAjK,KAAAuE,OAAAmG,cACA1K,KAAAuE,OAAAmG,cAAAjI,KADA,kBADA,MAMA,MAAM2I,UAAgChE,EAItC3B,aAAAsC,GACA,MAAApH,EAAAoH,EAAAG,QAAA,cACA,GAAAvH,EACA,WAAiByK,EAAuB,CACxC7G,OAAA,CAAiB5D,QACjB0K,OAAgBZ,EAA2Ba,MAAAvD,KAK3CtC,aAAApD,OAAeA,EAAAkC,SAAA8G,WACflE,MAAA,CAAW9E,SAAAkC,WACXzD,OAAAC,eAAAf,KAAA,UAA2CqB,MAAAgK,IAG3C5I,WACA,2BAEA9B,WACA,OAAAX,KAAAuE,OAAA5D,KAAAU,MAEAkK,UACA,MAAWX,QAAAnI,EAAA8B,SAAAoG,QAA8B3K,KAAAqL,OACzC,OAAA5I,EAIA,CAAYA,OAAApB,MADZ,oBAAAoB,EAAAkI,EAAApG,EAAAmG,cAAArJ,OAFA,KAKAmK,gBACA,MAAAZ,QAAWA,EAAAD,QAAgB3K,KAAAqL,OAC3B,OAAAV,GAAA,oBAAAC,EAGAD,EAFA,IAQO,MAAMc,UAA2BlB,EAIxC9E,aAAAsC,GACA,MAAAxD,EAAA,GAEA,GADAA,EAAAkE,KAAAV,EAAAG,QAAA,MACA3D,EAAAkE,KAAA,SACA,MAAAH,EAAA,IAAoBmD,EAAkB,CAAEpJ,OAAA0F,EAAA1F,OAAAkC,WAYxC,OAXA+D,EAAA7B,QAAgBkE,EAAI5C,EAAA,CACpB+C,OAAcM,EAAuBE,MACrCN,SAAA,wBAEAzG,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,kDACAG,EAAA/E,QACAwE,EAAAI,MAAA,qCAEAJ,EAAAb,MAAA,MACAa,EAAAI,MAAA,kEAEAG,GE7FO,MAAMoD,UAAiBtE,EAI9B3B,aAAAsC,GACA,MAAA4D,EAAA5D,EAAAzF,SACAiC,EAAA,GACA+D,EAAA,IAAoBoD,EAAQ,CAAErJ,OAAA0F,EAAA1F,OAAAkC,WAI9B,OAHA+D,EAAAmB,SAAmBgC,EAAkBH,MAAAvD,GACrCxD,EAAAqH,SAAA7D,EAAAG,QAAA,YACAI,EAAAW,QAAkBL,EAA6Bb,EAAA,iBAC/CO,EAAAW,SAGA1E,EAAAqH,WACArH,EAAAsH,SAAA9D,EAAAG,QAAA,QAEA3D,EAAA5D,KAAAoH,EAAAG,QAAA,gBAAqD9C,GACrDb,EAAA5D,MAGA2H,EAAAwD,QAAAvH,EAAAqH,SAAoC5B,EAAOsB,MAAAvD,GAAA,KAC3CO,GAHAP,EAAAgE,UAAAJ,IAPA5D,EAAAgE,UAAAJ,GAaAC,eACA,QAAA5L,KAAAuE,OAAAqH,SAEAC,eACA,QAAA7L,KAAAuE,OAAAsH,SAEAlL,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,QCpCZ,MAAM0J,UAAc3D,EAK3B3B,cAAAsC,EAAAtF,GACA,WACA,MAAApB,EAAA0G,EAAAG,QAAAzF,GACA,GAAApB,EACA,WAAmB0J,EAAK,CAAE1I,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoClD,YAK9DA,YACA,OAAArB,KAAAuE,OAAAlD,aCjBO,SAAA2K,EAAA/C,EAAAgD,GACP,IAAAhD,EAAAU,MAAA,CACA,MAAAO,
EAAA+B,EAAAC,OAAAjL,IAAAgI,WACA,QAAAiB,IAGA,YAAAA,EAAAzH,KACAuJ,EAAA9B,EAAAjB,QAAAgD,GAEA,eAAA/B,EAAAzH,MAEA,UAAAiG,KAAAO,EAAAP,QACA,GAAAsD,EAAAtD,EAAAuD,GACA,SAGA,SCXO,MAAME,UAAkB/E,EAI/B3B,aAAAsC,GAAAqE,QAA2BA,EAAAC,WAAmB,IAC9C,MAAA9H,EAAA,CAAoB6H,WACpB9D,EAAA,IAAoB6D,EAAS,CAAE9J,OAAA0F,EAAA1F,OAAAkC,WAC/B,OAAA6H,GAAA,gBAAAA,EAAA/K,QACAkD,EAAA+H,YAAAvE,EAAAG,QAAA,KACA3D,EAAA+H,cACAhE,EAAAkD,UAAA,GACAlD,IAGA8D,GAAAC,IACA9H,EAAA6H,QAAArE,EAAAG,QAAA,8BAEAI,EAAAW,QAAkBN,EAAWZ,MAAAI,MAAA,uBAC7B5D,EAAA5D,KAAAoH,EAAAG,QAAA,cACA3D,EAAAkE,KAAAV,EAAAG,QAAA,MAAAH,EAAAI,MAAA,qBACAG,EAAAkD,UAAoBN,EAAanD,GACjCxD,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,0BACA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,wCAC7CG,GAGA7F,WACA,kBAEA9B,WACA,MAAAA,KAAWA,GAAOX,KAAAuE,OAClB,OAAA5D,EAGWiJ,EAAQjJ,EAAAU,OAFnB,GAIA+K,cACA,OAAApM,KAAAuE,OAAA6H,QAGApM,KAAAuE,OAAA6H,QAAA/K,MAFA,GAKAoE,UAAAwG,GACA,UAAAM,KAAAvM,KAAAwL,UACA,GAAUQ,EAAyBO,EAAAtD,QAAAgD,KACnCM,EAAAT,QAAA,CACA,MAAAtJ,EAAA,yEACgBgC,EAAexE,KAAAqC,OAAAkK,EAAAhI,OAAA5D,KAAAX,KAAAwC,KClDxB,MAAMgK,UAAkBpF,EAI/B3B,aAAAsC,GAAAqE,QAA2BA,EAAAK,aAAA,EAAAC,YAAA,GAA+C,IAC1E,MAAAf,EAAA5D,EAAAzF,SACAiC,EAAA,CAAoB6H,WACpB9D,EAAA,IAAoBkE,EAAS,CAAEnK,OAAA0F,EAAA1F,OAAAkC,WAY/B,GAXA6H,GAAAK,IACAlI,EAAA6H,QAAArE,EAAAG,QAAA,YAEA,YAAAI,EAAA8D,SAAArE,EAAAb,MAAA,aACAa,EAAAI,MAAA,4CAEA5D,EAAAmI,SAAA3E,EAAAG,QAAA,YACAwE,IAAAnI,EAAAmI,UAAA3E,EAAAb,MAAA,cACAa,EAAAI,MAAA,+CAEA5D,EAAAgE,KAAAR,EAAAG,QAAA,aACA3D,EAAAgE,KAAA,CAKA,OADAD,EAAAW,QAAkBL,EAA6Bb,EAAA,mBAAAA,EAAAI,MAAA,0BAC/CG,EAAAW,QAAAI,SACA,eACA,aAAAtB,EAAAI,kCAAiEG,EAAAW,QAAAI,iBAIjE,OAFA9E,EAAA5D,KAAAoH,EAAAG,QAAA,0BAAAH,EAAAI,MAAA,0BACA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,wCAC7CG,EAVAP,EAAAgE,UAAAJ,GAaAlJ,WACA,kBAEA2J,cACA,OAAApM,KAAAuE,OAAA6H,QAGApM,KAAAuE,OAAA6H,QAAA/K,MAFA,GAIAqL,eACA,QAAA1M,KAAAuE,OAAAmI,SAEA/L,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,QLvCZ,SAASuI,EAAQ9E,GACxB,OAAAA,EAAA0B,WAAA,KAAA1B,EAAAlC,MAAA,GAAAkC,EAWO,SAAA6F,EAAA5C,GAAA+C,OAA0BA,EAAA6B,eAAA3B,WAAA,SACjC,MAAA4B,EAAA9B,EAAA/C,GACA,IAAA6E,EACA,SAEAA,EAAArI,OAAAwE,UAAAhB,EAAAG,QAAA,KACA,MAAA2E,EAAA,CAAAD,GACA,KAAAA,EAAArI,OAAAwE,WAAA,CACA,MAAA+D,EAAAhC,EAAA/C,GACA,IAAA+E,EAAA,CACAH,GACA5E,EAAAI,2BAA6C6C,KAE7C,MAIA,GAFA8B,EAAAvI,OAAAwE,UAAAhB,EAAAG,QAAA,KACA2E,EAAApG,KAAAqG,IACAA,EAAAvI,OAAAwE,UAAA,MAEA,OAAA8D,EAMO,SAAA1C,EAAApC,GACP,OAAAA,EAAAG,QAAA,iEAQO,SAAAmC,GAAA5H,KAAqBA,EAAApB,UAC5B,OAAAoB,GACA,WACA,YACA,OAAcA,KAAA,UAAApB,MAAA,SAAAoB,GACd,eACA,gBACA,OAAcA,KAAA,WAAA6H,SAAA7H,EAAA+D,WAAA,MACd,QACA,OAAc/D,KAAA,WAAApB,MAAA,IACd,QACA,OAAcoB,KAAA,cACd,cACA,cACA,OAAcA,KAAA,SAAApB,SACd,aACA,OAAcoB,KAAA,SAAApB,QAAAuB,MAAA,OACd,QACA,OAAcH,SAOP,SAAA2G,EAAArB,GAoBP,MAAA1F,OAASA,GAAS0F,EAClBgF,EApBA,WACA,MAAAlD,EAAA9B,EAAAG,QAAA,YACAK,EAAAR,EAAAG,QAAA,gBACA,GAAAK,EAAA,CACA,MAAAuB,EAAA/B,EAAAG,QAAA,QACA,WAAiBM,EAAI,CAAEnG,SAAAkC,OAAA,CAAkBsF,SAAAtB,OAAAuB,aAEzCD,GAAA9B,EAAAI,MAAA,gCAaA6E,IAVA,WACA,MAAAnD,EAAA9B,EAAAG,QAAA,gBACAK,EAAAR,EAAAG,QAAA,kBACA,GAAAK,EACA,WAAiBC,EAAI,CAAEnG,SAAAkC,OAAA,CAAkBsF,SAAAtB,UAEzCsB,GAAA9B,EAAAI,MAAA,8BAIA8E,GACA,GAAAF,EAAA,OAAAA,EACA,MAAAxE,EAAAR,EAAAG,QAAA,0BACA,OAAAK,EACA,IAAeC,EAAI,CAAEnG,SAAAkC,OAAA,CAAkBgE,eADvC,EAmBO,SAAA2C,EAAAnD,GACP,OAAA4C,EAAA5C,EAAA,CAA0B+C,OAASY,EAAQJ,MAAAN,SAAA,mBAOpC,SAAApC,EAAAb,EAAAM,GACP,MAAAoB,EAAmBgC,EAAkBH,MAAAvD,GACrCO,EAAcE,EAAI8C,MAAAvD,EAAAM,GAElB,OADAC,MAAAmB,YACAnB,EAOO,SAAAK,EAAAZ,EAAAM,GACP,MAAAiB,EAAcd,EAAI8C,MAAAvD,EAAAM,GAAA,eAClB,GAAAiB,EACA,OAAAA,EAEA,MAAA4D,EAAAnF,EAAAG,QAAA,QACA,GAAAgF,EAAA,CACA,MAAA5E,EAAA,IAAoBE,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoCgE,KAAA2E,K
AE9D,OADA5E,EAAA7F,KAAA,cACA6F,GAOO,SAAA6E,EAAApF,GACP,MAAAqE,EAAArE,EAAAG,QAAA,eACA,GAAAkE,EAIA,OAHiBI,EAASlB,MAAAvD,EAAA,CAAmBqE,aACzCD,EAASb,MAAAvD,EAAA,CAAmBqE,aAChCrE,EAAAI,MAAA,4BMhKA,MAAMiF,UAAkBrC,EAIxBtF,aAAAsC,GACA,MAAA1G,EAAA0G,EAAAG,QAAA,UACA,GAAA7G,EACA,WAAiB+L,EAAS,CAAE/K,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoClD,WAIhEoB,WACA,mBAEApB,YACA,OAAA8F,MAAA9F,MAAAuB,MAAA,OAIO,MAAMyK,UAAajG,EAI1B3B,aAAAsC,GACA,MAAAxD,EAAA,GAEA,GADAA,EAAAgE,KAAAR,EAAAG,QAAA,SACA3D,EAAAgE,KACA,OAEAhE,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,MAAA,oBACA,MAAAG,EAAAP,EAAAxF,QAAA,IAAwC8K,EAAI,CAAEhL,OAAA0F,EAAA1F,OAAAkC,WAe9C,OAdAA,EAAAkE,KAAAV,EAAAG,QAAA,MAAsCH,EAAAI,MAAA,iBACtCG,EAAAgF,OAAiB3C,EAAI5C,EAAA,CACrB+C,OAAcsC,EAAS9B,MACvBqB,cAAA,EACA3B,SAAA,gBAEAjD,EAAAb,MAAA,WACAa,EAAAI,MAAA,gCAEA5D,EAAA2E,MAAAnB,EAAAG,QAAA,MAAuCH,EAAAI,MAAA,4BACvCG,EAAAgF,OAAA/J,QACAwE,EAAAI,MAAA,oBAEA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,2BAC7CG,EAGA7F,WACA,aAEA9B,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,QCrDZ,MAAMkM,UAAiBnG,EAI9B3B,aAAAsC,GACA,MAAAyF,EAAAzF,EAAAG,QAAA,cACA,IAAAsF,EACA,OAEA,MAAAjJ,EAAA,CAAoBiJ,UAEpB,GADAjJ,EAAA+B,SAAAyB,EAAAG,QAAA,YACA3D,EAAA+B,SAMA,OAFA/B,EAAAkJ,MAAA1F,EAAAG,QAAA,eAAAH,EAAAI,MAAA,iCACA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,2CAC7C,IAAeoF,EAAQ,CAAElL,OAAA0F,EAAA1F,OAAAkC,WALzBwD,EAAAgE,UAAAyB,EAAA9I,OAQAjC,WACA,iBAEA+K,aACA,OAAW5D,EAAQ5J,KAAAuE,OAAAiJ,OAAAnM,OAEnBiF,eACA,OAAWsD,EAAQ5J,KAAAuE,OAAAkJ,MAAApM,QC3BZ,MAAMqM,UAAgBtG,EAI7B3B,aAAAsC,GACA,MAAAxD,EAAA,GACA+D,EAAA,IAAoBoF,EAAO,CAAErL,OAAA0F,EAAA1F,OAAAkC,WAE7B,GADAA,EAAAgE,KAAAR,EAAAG,QAAA,WACA3D,EAAAgE,KAOA,OAJAD,EAAAW,QAAkBL,EAA6Bb,EAAA,iBAAAA,EAAAI,MAAA,wBAC/C5D,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,MAAA,wBACAJ,EAAAxF,QAAA+F,EACA/D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,sCAC7CG,EAGA7F,WACA,gBAEA9B,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,QCtBZ,MAAMsM,UAAyBvG,EAItC3B,aAAAsC,EAAAQ,GACA,MAAAhE,EAAA,CAAoBgE,QACpBD,EAAA,IAAoBqF,EAAgB,CAAEtL,OAAA0F,EAAA1F,OAAAkC,WAStC,OARAA,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,MAAA,yBACAJ,EAAAxF,QAAA+F,EACA/D,EAAA0F,OAAAlC,EAAAG,QAAA,MAAAH,EAAAI,MAAA,gCACAG,EAAAW,QAAkBN,EAAWZ,MAAAI,MAAA,gCAC7B5D,EAAAkE,KAAAV,EAAAG,QAAA,MAAAH,EAAAI,MAAA,4CACAG,EAAAkD,UAAoBN,EAAanD,GACjCxD,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,MAAA,yBACA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,uCAC7CG,EAGA7F,WACA,iBAEA9B,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,QCTZ,MAAMuM,UAAkBxG,EAM/B3B,aAAAsC,EAAA8F,GAAApL,KAAuCA,EAAAqL,cAAAC,mBACvC,MAAAxJ,OAAaA,GAASsJ,EAQtB,IAPAtJ,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,yBAA0F0F,EAAApL,QAC1FsF,EAAAxF,QAAAsL,EACAC,GACAhN,OAAAmJ,OAAA1F,EApBA,SAAAwD,GACA,MAAAiG,EAAAjG,EAAAG,QAAA,KACA,OAAA8F,EAIA,CAAUA,QAAAxG,YADVO,EAAAG,QAAA,eAAAH,EAAAI,MAAA,6BAFA,GAiBAX,CAAAO,IAEAxD,EAAAkE,KAAAV,EAAAG,QAAA,MAAwCH,EAAAI,kBAAkC1F,KAC1EoL,EAAAI,QAAA,KACA,CAEA,GADA1J,EAAA2E,MAAAnB,EAAAG,QAAA,KACA3D,EAAA2E,MAEA,OADA3E,EAAA+H,YAAAvE,EAAAG,QAAA,MAAmDH,EAAAI,iCAAiD1F,KACpGoL,EAEA,MAAAK,EAAmBzC,EAAkBH,MAAAvD,GACrC,IAAAoG,EACA,UAAArD,KAAAsD,KAAAL,EAEA,GADAI,EAAArD,EAAA/C,KAAAqG,GAEA,MAGAD,GACApG,EAAAI,MAAA,kBAEAgG,EAAA1E,SAAAyE,EACAL,EAAAI,QAAAxH,KAAA0H,IAIAhK,cACA,QAAAnE,KAAAuE,OAAAJ,QAEAxD,WACA,OAAaiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,OAErBmG,kBACA,OAAAxH,KAAAuE,OAAAiD,YAGaoC,EAAQ5J,KAAAuE,OAAAiD,YAAAnG,OAFrB,KAKAoE,UAAAwG,GACA,UAAAoC,KAAArO,KAAAiO,QACAI,EAAAC,iBACAD,EAAAC,SAAArC,KCjEO,MAAMsC,UAAiBnH,EAI9B3B,aAAAsC,GACA,MAAAxD,EAAA,GAEA,GADAA,EAAAgE,KAAAR,EAAAG,QAAA,UACA3D,EAAAgE,KACA,OAEA,IAAAU,EAAkBG,EAAcrB,GAChC,IAAAkB,EAAA,CACA,MAAAV,EAAAR,EAAAG,QAAA,eAAAH,EAAAI,MAAA,sBACAc,EAAA,IAAoBT,EAAI,CAAEnG,OAAA0F,EAAA1F,OAAAkC,OAAA,CAAoCgE,UAE
9DR,EAAAb,MAAA,MACAa,EAAAI,MAAA,qCAEAc,EAAAxG,KAAA,aACA8B,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,MAAA,sBACA5D,EAAA0F,OAAAlC,EAAAG,QAAA,MAAAH,EAAAI,MAAA,gCACA5D,EAAAlD,MAAmB8I,EAAWpC,MAAAI,MAAA,uBAC9B5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,oCAC7C,MAAAG,EAAA,IAAoBiG,EAAQ,CAAElM,OAAA0F,EAAA1F,OAAAkC,WAE9B,OADA+D,EAAAW,UACAX,EAGA7F,WACA,cAEA9B,WACA,OAAA6N,SAAAxO,KAAAuE,OAAA5D,KAAAU,OAEAA,YACA,OAAWgJ,EAAUrK,KAAAuE,OAAAlD,QCpCd,MAAMoN,UAAqBrH,EAIlC3B,aAAAsC,GACA,MAAA4D,EAAA5D,EAAAzF,SACAiC,EAAA,GACA+D,EAAA,IAAoBmG,EAAY,CAAEpM,OAAA0F,EAAA1F,OAAAkC,WAKlC,GAJAA,EAAAmI,SAAA3E,EAAAG,QAAA,YACA3D,EAAAgE,KAAAhE,EAAAmI,SACA3E,EAAAG,QAAA,qBACAH,EAAAG,QAAA,iCACA3D,EAAAgE,KAEA,YADAR,EAAAgE,UAAAJ,GAIA,MAAAlJ,KAAWA,GAAO6F,EAClBoG,EAAA,YAAAjM,EACAkM,EAAAD,GAAA,aAAAjM,EAEA8B,EAAAkE,KAAAV,EAAAG,QAAA,MAAAH,EAAAI,yCAA+F1F,iBAC/F,MAAAmK,EAAkBhE,EAA6Bb,MAAAI,oCAA6D1F,iBAa5G,OAZA6F,EAAAW,QAAA,CAAA2D,GACA+B,IACA/B,EAAArI,OAAAwE,UAAAhB,EAAAG,QAAA,KACA0E,EAAArI,OAAAwE,UACAT,EAAAW,QAAAxC,KAAyBmC,EAA6Bb,IAEtD2G,GACA3G,EAAAI,yCAAyD1F,kBAEzD8B,EAAA2E,MAAAnB,EAAAG,QAAA,MAAAH,EAAAI,4CAAmG1F,iBACnG8B,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,iCAAiD1F,iBAE9F6F,EAGA7F,WACA,OAAAzC,KAAAuE,OAAAgE,KAAAlH,MAEAqL,eACA,QAAA1M,KAAAuE,OAAAmI,UCjCA,SAAAkC,EAAA7G,GACA,MAAAqE,EAAArE,EAAAG,QAAA,UACA,GAAAkE,EAIA,OAHiBI,EAASlB,MAAAvD,EAAA,CAAmBqE,aACzCD,EAASb,MAAAvD,EAAA,CAAmBqE,aAChCrE,EAAAI,MAAA,4BAIO,MAAM0G,UAAkBjB,EAI/BnI,aAAAsC,EAAAQ,GAAApE,QAAiCA,EAAA,MAAiB,IAClD,MAAAI,EAAA,CAAoBJ,UAAAoE,QACpB,OAAWqF,EAAStC,MAAAvD,EAAA,IAAsB8G,EAAS,CAAExM,OAAA0F,EAAA1F,OAAAkC,WAAmC,CACxF9B,KAAA,YACAqL,aAAA3J,EACA4J,eAAA,CACA,CAASQ,EAAQjD,OACjB,CAAAsD,GACA,CAASzB,GACT,CAASsB,EAAYnD,OACrB,CAASkB,EAASlB,OAClB,CAASa,EAASb,UAKlB7I,WACA,kBAGAgD,UAAAwG,GACA,IAAAjM,KAAAmE,SAAAnE,KAAAyJ,SAAAqF,MAAAC,GAAA,YAAAA,EAAApO,MAAA,CACA,MAAA6B,EAAA,oTAKYgC,EAAexE,KAAAqC,OAAArC,KAAAuE,OAAA5D,KAAAX,KAAAwC,SAE3B2E,MAAAmH,SAAArC,GACAjM,KAAAmE,gBCrDO,UAAA8H,EAAA7L,GACP,MAAA4O,EAAA,IAAAC,IAAAC,EAAA9O,GAAA+C,IAAAgM,KAAAxO,OACAyO,EAAAnD,EAAAmD,SAAAnO,IAAAb,EAAAO,OAAA,GACA0O,EAAApD,EAAAqD,SAAArO,IAAAb,EAAAO,OAAA,GACA,UAAA4O,IAAA,IAAAH,KAAAC,GAAA,CACA,MAAAG,EAAAN,EAAAK,SACAE,EAAAD,EAAAR,EAAAO,EAAAnP,GACA,UAAAsP,KAAAF,EACAR,EAAAW,IAAAD,EAAA/O,MAIA,SAAA8O,EAAAD,EAAAI,EAAAL,EAAAhH,GACA,UAAAmH,KAAAF,EAAA,CACA,MAAA7O,KAAaA,GAAO+O,EACpB,GAAA/O,GAAAiP,EAAAC,IAAAlP,GAAA,CACA,MAAA6B,oBAA0C7B,uDAA0D4H,EAAA5H,6CACtF6D,EAAe+K,EAAAlN,OAAAqN,EAAAnL,OAAA5D,KAAA4O,EAAA/M,KAK7B,SAAA0M,EAAA9O,GACA,OAAAA,EAAA6N,QACAlE,OAAA,EAAgBtH,UAAK,cAAAA,ID8BRqN,CAA+B7D,EAAAjM,QElDrC,MAAM+P,UAAcnC,EAI3BnI,aAAAsC,EAAAQ,GAAApE,QAAiCA,GAAU,IAC3C,MAAAI,EAAA,CAAoBJ,UAAAoE,QAEpB,GADAhE,EAAAkJ,MAAA1F,EAAAG,QAAA,SACA3D,EAAAkJ,MAGA,OAAWG,EAAStC,MAAAvD,EAAA,IAAsBgI,EAAK,CAAE1N,OAAA0F,EAAA1F,OAAAkC,WAAmC,CACpF9B,KAAA,kBACAsL,eAAA,CACA,CAASQ,EAAQjD,OACjB,CAAS6B,GACT,CAASX,EAASlB,MAAA,CAASmB,WAAA,IAC3B,CAASN,EAASb,MAAA,CAASe,SAAA,OAK3B5J,WACA,yBCvBO,MAAMuN,UAAc5I,EAI3B3B,aAAAsC,GACA,MAAAxD,EAAA,GACA+D,EAAA,IAAoB0H,EAAK,CAAE3N,OAAA0F,EAAA1F,OAAAkC,WAQ3B,OAPA+D,EAAAmB,SAAmBgC,EAAkBH,MAAAvD,GACrCxD,EAAA0L,SAAAlI,EAAAG,QAAA,YACAI,EAAAW,QAAkBL,EAA6Bb,EAAA,oBAAAA,EAAAI,MAAA,kCAC/C5D,EAAA5D,KAAAoH,EAAAG,QAAA,eAAAH,EAAAI,MAAA,kCACAG,EAAAwD,QAAkB9B,EAAOsB,MAAAvD,GACzBxD,EAAA0L,UAAA3H,EAAAwD,SAAA/D,EAAAI,MAAA,2CACA5D,EAAA+H,YAAAvE,EAAAG,QAAA,MAA6CH,EAAAI,MAAA,gDAC7CG,EAGA7F,WACA,cAEA9B,WACA,OAAWiJ,EAAQ5J,KAAAuE,OAAA5D,KAAAU,OAEnB4O,eACA,QAAAjQ,KAAAuE,OAAA0L,UC1BO,MAAMC,UAAmBtC,EAIhCnI,aAAAsC,GAAA5D,QAA2BA,GAAU,IACrC,MAAAI,EAAA,CAAoBJ,WAEpB,GADAI,EAAAgE,KAAAR,EAAAG,QAAA,cACA3D,EAAAgE,KAGA,OAAWqF,EAAStC,MAAAvD,EAAA,IAAsBmI,EAAU,CAAE7N,OAAA
0F,EAAA1F,OAAAkC,WAAmC,CACzF9B,KAAA,aACAqL,aAAA3J,EACA4J,eAAA,CACA,CAASiC,EAAK1E,UAKd7I,WACA,oBClBO,MAAM0N,UAAkBvC,EAI/BnI,aAAAsC,GAAA5D,QAA2BA,GAAU,IACrC,MAAAI,EAAA,CAAoBJ,WAEpB,GADAI,EAAAgE,KAAAR,EAAAG,QAAA,aACA3D,EAAAgE,KAGA,OAAWqF,EAAStC,MAAAvD,EAAA,IAAsBoI,EAAS,CAAE9N,OAAA0F,EAAA1F,OAAAkC,WAAmC,CACxF9B,KAAA,YACAsL,eAAA,CACA,CAASvB,EAASlB,MAAA,CAASmB,WAAA,EAAAC,UAAA,IAC3B,CAASP,EAASb,MAAA,CAASe,SAAA,OAK3B5J,WACA,kBAGAgD,UAAAwG,GACA,IAAAjM,KAAAmE,SAAAnE,KAAAyJ,SAAAqF,MAAAC,GAAA,YAAAA,EAAApO,MAAA,CACA,MAAA6B,EAAA,gTAKYgC,EAAexE,KAAAqC,OAAArC,KAAAuE,OAAA5D,KAAAX,KAAAwC,SAE3B2E,MAAAmH,SAAArC,IChCO,MAAMmE,UAA0BxC,EAIvCnI,aAAAsC,EAAAsI,GAAAlM,QAAqCA,EAAA,MAAiB,IACtD,MAAAI,EAAA,CAAoB8L,YAEpB,GADA9L,EAAAgE,KAAAR,EAAAG,QAAA,aACA3D,EAAAgE,KAGA,OAAWqF,EAAStC,MAAAvD,EAAA,IAAsBqI,EAAiB,CAAE/N,OAAA0F,EAAA1F,OAAAkC,WAAmC,CAChG9B,KAAA,qBACAqL,aAAA3J,EACA4J,eAAA,CACA,CAASQ,EAAQjD,OACjB,CAASa,EAASb,MAAA,CAASe,SAAA,OAK3B5J,WACA,4BCPA,SAAA6N,EAAAvI,EAAAwI,GACA,MAAAlO,EAAA0F,EAAA1F,OAEA,SAAA8F,EAAAxC,GACAoC,EAAAI,MAAAxC,GAGA,SAAAuC,KAAAjB,GACA,OAAAc,EAAAG,WAAAjB,GAYA,SAAAuJ,EAAAC,GACA,MAAAlI,EAAAL,EAAA,aACA,GAAAK,EAIA,OAHgBwH,EAAKzE,MAAAvD,EAAAQ,EAAAkI,IACf5B,EAASvD,MAAAvD,EAAAQ,EAAAkI,IACftI,EAAA,gCAaA,SAAAuI,IACA,OA5BA,WACA,MAAAL,EAAAnI,EAAA,YACA,GAAAmI,EACA,OAAAtI,EAAAb,MAAA,aACakJ,EAAiB9E,MAAAvD,EAAAsI,GAEnB1C,EAAgBrC,MAAAvD,EAAAsI,GAsB3BA,IACAG,KAXA,WACA,MAAArM,EAAA+D,EAAA,WACA,GAAA/D,EACA,OAAW+L,EAAU5E,MAAAvD,EAAA,CAAmB5D,aACxCqM,EAAA,CAAkBrM,aACZgM,EAAS7E,MAAAvD,EAAA,CAAmB5D,aAClCgE,EAAA,qCAMAhE,IACM+L,EAAU5E,MAAAvD,IACVsF,EAAI/B,MAAAvD,IACJ2F,EAAOpC,MAAAvD,IACPwF,EAAQjC,MAAAvD,IACRoI,EAAS7E,MAAAvD,GAsBf,MAAA4I,EAnBA,WACA,IAAAtO,EAAAkB,OAAA,SACA,MAAA0I,EAAA,GACA,QACA,MAAAiC,EAAiBzC,EAAkBH,MAAAvD,GACnCmC,EAAAwG,IACA,IAAAxG,EAAA,CACAgE,EAAA3K,QAAA4E,EAAA,6BACA,MAEA+B,EAAAT,SAAAyE,EACAjC,EAAAxF,KAAAyD,GAEA,MAAA0G,EAAA1I,EAAA,OAIA,OAHAqI,EAAAM,UACA5E,EAAAxF,KAAAmK,GAEA3E,EAEA6E,GAEA,OADA/I,EAAAzF,SAAAD,EAAAkB,QAAA4E,EAAA,uBACAwI,EAGO,SAAArF,EAAA3F,EAAA4K,EAAA,IACP,MAAAxI,EAAA,IAAwBvC,EAASG,GAIjC,YAHA,IAAA4K,EAAAlM,aACA0D,EAAA1F,OAAA1B,KAAA4P,EAAAlM,YAEAiM,EAAAvI,EAAAwI,GC/FA,SAAAQ,EAAAC,GACA,OAAAA,EAGA,MAAAC,EAAA,CACAC,KAAArE,KAAAxJ,KAAA,IACAD,OAAA2N,EACApQ,KAAAoQ,EACAI,UAAAJ,EACAtO,KAAAsO,EACA1H,QAAA0H,EACAvJ,YAAAuJ,EACAL,WAAAK,EACAK,kBAAAL,EACAM,2BAAAN,GAGO,SAAAO,GAAAC,GAAqBN,UAAAO,EAAAP,GAA4B,IAGxD,SAAAE,EAAAM,GAAAC,UAA2BA,EAAAxN,YAI3B,OAHAwN,IACAA,EAAAD,EAAAjL,WAAA,KAAAiL,EAAA7O,MAAA,GAAA6O,GAEAD,EAAAL,UAAAM,EAAAC,EAAAxN,GAGA,SAAAO,EAAAnD,EAAAqQ,EAAAZ,KAAA3C,GACA,IAAA9M,EACA,SAEA,MAAAD,EAAAsQ,EAAArQ,EAAAD,SAAA+M,GACA,OAAAoD,EAAAN,KAAA,CAAAM,EAAApO,OAAA9B,EAAA8B,QAAA/B,IAGA,SAAAuQ,EAAAtQ,EAAA4C,GACA,OAAAO,EAAAnD,EAAA6P,EAAA,CAAgCjN,YAGhC,SAAA2N,EAAAvQ,EAAA0P,GACA,OAAAvM,EAAAnD,EAAAkQ,EAAA7Q,KAAAqQ,GAGA,SAAAc,EAAAC,GACA,GAAAA,EAAApI,OAAAoI,EAAA1I,QACA,OAAAmI,EAAAN,KAAA,CACAzM,EAAAsN,EAAAxN,OAAAgE,KAAAiJ,EAAAnI,SACA5E,EAAAsN,EAAAxN,OAAAkE,SACAsJ,EAAArJ,QAAAvF,IAAAV,GACAgC,EAAAsN,EAAAxN,OAAA2E,SAGA,MAAA8I,EAAAD,EAAAxN,OAAAsF,QAAAkI,EAAAxN,OAAAgE,KACAsB,EAAAkI,EAAAxN,OAAAsF,OAAA,CACAkI,EAAAxN,OAAAsF,OAAAxI,MACAmQ,EAAApO,OAAA2O,EAAAxN,OAAAgE,KAAAnF,SACA,GACA6O,EAAAd,EAAAK,EAAAN,KAAA,IACArH,EACAkI,EAAAxN,OAAAgE,KAAAlH,MACAoD,EAAAsN,EAAAxN,OAAAuF,WACA,CAAS4H,UAAAK,EAAA9I,QAAA/E,QAAA6N,IACT,OAAAP,EAAAN,KAAA,CAAAM,EAAApO,OAAA4O,EAAA5O,QAAA6O,IAEA,SAAAxP,EAAAsP,GACA,OAAAP,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAqI,EAAAC,GACAtN,EAAAsN,EAAAxN,OAAA0D,UACAxD,EAAAsN,EAAAxN,OAAAwE,aAGA,SAAAoJ,EAAAjI,GACA,OAAAA,EAGAsH,EAAAN,KAAA,CACAzM,EAAAyF,EAAA3F,OAAA0F,WACAC,EAAAE,WAAAjH,IAAA7B,GAAAmD,EAAAnD,MAJA,GAOA,SAAAiL,EAAAyE,GAC
A,OAAAQ,EAAAN,KAAA,CACAgB,EAAAlB,EAAAvH,UACAhF,EAAAuM,EAAAzM,OAAAqH,UACA4F,EAAA/O,OAAAuO,EAAA/H,UACAxE,EAAAuM,EAAAzM,OAAAsH,UACAgG,EAAAb,EAAAzM,OAAA5D,KAAA,CAAmCyR,KAAApB,IACnCmB,EAAAnB,EAAAlF,SACArH,EAAAuM,EAAAzM,OAAAwE,aASA,SAAAsJ,EAAAN,GACA,MAAAnH,QAAWA,GAAUmH,EAAA1G,OACrB,OAAAmG,EAAAN,KAAA,CACAM,EAAApO,OAAA2O,EAAAxN,OAAA5D,KAAAyC,QACAoO,EAAAJ,kBAAAI,EAAAN,KAAA,CACAM,EAAAH,2BAAAU,EAAApR,MACA8D,EAAAsN,EAAA1G,OAAA9G,OAAA0F,QACA2H,EAAAG,EAAA1G,OAAA9G,OAAAmG,cAAAqH,GACAtN,EAAAsN,EAAA1G,OAAA9G,OAAAkE,SACAsJ,EAAA1G,OAAAV,KACAoH,EAAA1G,OAAAV,KAAAxH,IACA,oBAAAyH,EAAA0H,IAjBA,SAAAA,EAAApO,GACA,OAAAsN,EAAAN,KAAA,CACAU,EAAAU,EAAA/N,OAAAlD,MAAA6C,GACAO,EAAA6N,EAAA/N,OAAAwE,cAcAjE,CAAAwN,EAAAP,GAAAxF,GAFA,GAIA9H,EAAAsN,EAAA1G,OAAA9G,OAAA2E,UAEAzE,EAAAsN,EAAAxN,OAAAwE,aAGA,SAAAmJ,EAAAK,GACA,OAAAA,EAAAhP,OACAiO,EAAAN,KAAA,CACAzM,EAAA8N,EAAAhO,OAAAkE,SACA8J,EAAApP,IAAAkP,GACA5N,EAAA8N,EAAAhO,OAAA2E,SAJA,GA+CA,SAAAsJ,EAAAT,GACA,OAAAP,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAA8L,UACA5L,EAAAsN,EAAAxN,OAAAJ,SACAM,EAAAsN,EAAAxN,OAAAgE,MACA9D,EAAAsN,EAAAxN,OAAAkJ,OACAoE,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,KAlBlCU,EAmBAV,EAlBAU,EAAAlO,OAAAiD,YAGAgK,EAAAN,KAAA,CACAzM,EAAAgO,EAAAlO,OAAAyJ,OACAwD,EAAApO,OAAAqP,EAAAlO,OAAAiD,YAAApE,QACAoO,EAAAhK,YAAA2J,EAAAsB,EAAAlO,OAAAiD,YAAAnG,MAAA,CAA8D6C,QAAAuO,OAL9D,IAkBAhO,EAAAsN,EAAAxN,OAAAkE,MACAiK,EAAAX,EAAA9D,QAAA8D,GACAtN,EAAAsN,EAAAxN,OAAA2E,OACAzE,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,IAxBT,IAAAU,EAoGA,SAAAE,EAAAZ,EAAAa,GACA,OAAApB,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAAmI,UACAjI,EAAAsN,EAAAxN,OAAAgE,KAAAiJ,EAAAnI,SACA5E,EAAAsN,EAAAxN,OAAAkE,MACA+I,EAAAN,KAAAa,EAAA9I,QAAA9F,IAAAV,IACAgC,EAAAsN,EAAAxN,OAAA2E,OACAzE,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,EAAAa,WApPTpB,EAAA1Q,OAAAmJ,OAAA,GAAuBgH,EAAAO,GA0PvB,MAAAqB,EAAA,CACAC,UAAAN,EACAO,kBAAAP,EACAQ,UAAAR,EACAS,UAnJA,SAAAlB,EAAAa,GACA,MAAAM,EAAAnB,EAAA9I,QAAA,CACAuI,EAAA/O,OAAAsP,EAAA9I,UACA4I,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,EAAAa,WAClCnO,EAAAsN,EAAAxN,OAAAkE,MACA+I,EAAAN,KAAAa,EAAAvG,UAAArI,IAAAoJ,IACA9H,EAAAsN,EAAAxN,OAAA2E,QACA,GACA,OAAAsI,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAA6H,YACA8G,EACAzO,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,EAAAa,YAuITO,UApIA,SAAApB,EAAAa,GACA,OAAApB,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAA6H,SACA3H,EAAAsN,EAAAxN,OAAAmI,UACAjI,EAAAsN,EAAAxN,OAAAgE,MACAiJ,EAAA/O,OAAAsP,EAAA9I,UACA4I,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,EAAAa,WAClCnO,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,EAAAa,YA4HTQ,WAAAZ,EACAa,MA/FA,SAAAtB,EAAAa,GACA,OAAApB,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAA0L,UACAuB,EAAA/O,OAAAsP,EAAA9I,UACA4I,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,EAAAa,WAClCT,EAAAJ,EAAAjG,SACArH,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,EAAAa,YAwFTU,MAtFA,SAAAvB,EAAAa,GACA,OAAApB,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAAgE,MACAiJ,EAAA/O,OAAAsP,EAAA9I,UACA4I,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,EAAAa,WAClCnO,EAAAsN,EAAAxN,OAAA0F,QACAxF,EAAAsN,EAAAxN,OAAAlD,OACAoD,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,EAAAa,YA8ETW,QA5EA,SAAAxB,GACA,OAAAP,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAAgE,MACAiJ,EAAA/O,OAAAsP,EAAA9I,UACA4I,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,IAClCtN,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,KAsETzL,SApEA,SAAAyL,GACA,OAAAP,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAmI,EAAAG,EAAAxN,OAAAiJ,OAAAuE,GACAtN,EAAAsN,EAAAxN,OAAA+B,UACAsL,EAAAG,EAAAxN,OAAAkJ,MAAAsE,GACAtN,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,
KA8DT1B,SA5DA,SAAA0B,GACA,OAAAP,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAAgE,MACAsJ,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,IAClCtN,EAAAsN,EAAAxN,OAAA0F,QACAuH,EAAA/O,OAAAsP,EAAA9I,UACAxE,EAAAsN,EAAAxN,OAAAkE,SACAsJ,EAAAvG,UAAArI,IAAAoJ,GACA9H,EAAAsN,EAAAxN,OAAA2E,OACAzE,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,KAkDTyB,KAhDA,SAAAzB,GACA,OAAAP,EAAAd,WAAAc,EAAAN,KAAA,CACAgB,EAAAH,EAAAtI,UACAhF,EAAAsN,EAAAxN,OAAAgE,MACAsJ,EAAAE,EAAAxN,OAAA5D,KAAA,CAAkCyR,KAAAL,IAClCtN,EAAAsN,EAAAxN,OAAAkE,MACAiK,EAAAX,EAAAzE,OAAAyE,GACAtN,EAAAsN,EAAAxN,OAAA2E,OACAzE,EAAAsN,EAAAxN,OAAA+H,eACA,CAAS8F,KAAAL,KAwCT0B,aAtCA,SAAAC,EAAAd,GACA,OAAApB,EAAAN,KAAA,CACAM,EAAApO,OAAAsQ,EAAAnP,OAAAlD,MAAA+B,QACAoO,EAAAd,WACAc,EAAAN,KAAA,KAAAM,EAAA7Q,KAAA+S,EAAArS,MAAA,CAAwC+Q,KAAAsB,EAAAd,WAAkB,MAC1D,CAASR,KAAAsB,EAAAd,WAETnO,EAAAiP,EAAAnP,OAAAwE,cAgCA4K,SAAAhB,EACAiB,eAAAjB,EACAkB,QAAAlB,EACAmB,QAAAnB,EACAoB,qBAAAvB,EACA5B,IAvBA,SAAAmB,GACA,OAAAP,EAAApO,OAAA2O,EAAA3O,UA+BA,SAAAsP,EAAAsB,EAAApB,GACA,IAAAoB,EAAA,OACA,MAAAC,EAAAD,EAAA7Q,IAAA+Q,IATA,SAAAnC,EAAAa,GAEA,IADAC,EAAAd,EAAAtP,MAEA,UAAAiE,eAA+BqL,EAAAtP,wBAE/B,OAAAoQ,EAAAd,EAAAtP,MAAAsP,EAAAa,IAIAuB,CAAAD,EAAAtB,IACA,OAAApB,EAAAN,KAAA+C,GAEA,OAAAvB,EAAAnB,GC3SA,SAAA6C,GAAAC,EAAAnI,GACA,MAAA/I,EAAA,IAAAmR,IACAhO,EAAA+N,EAAAtK,OAAAG,GAAA,aAAAA,EAAAzH,MACA,UAAA8R,KAAAjO,EAAA,CACA,MAAAmH,EAAAvB,EAAAjL,IAAAsT,EAAAjO,UACA,IAAAmH,EACA,SAEA,MAAA+G,EAAArR,EAAAlC,IAAAsT,EAAA/G,QACAgH,EACAA,EAAA/N,KAAAgH,GAEAtK,EAAAsR,IAAAF,EAAA/G,OAAA,CAAAC,IAGA,OAAAtK,EA2CA,SAAAuR,GAAAnD,GACA,MAAAtF,EAzCA,SAAAoI,GACA,MAAAnI,EAAA,IAAAoI,IACAK,EAAA,IAAA1F,IACAG,EAAA,IAAAkF,IACA,UAAApK,KAAAmK,EACA,GAAAnK,EAAA/F,QAAA,CACA,MAAAqQ,EAAApF,EAAAnO,IAAAiJ,EAAAvJ,MACA6T,EACAA,EAAA/N,KAAAyD,GAEAkF,EAAAqF,IAAAvK,EAAAvJ,KAAA,CAAAuJ,SAIAA,EAAAvJ,OAGAuL,EAAA2D,IAAA3F,EAAAvJ,MAGAgU,EAAAhF,IAAAzF,GAFAgC,EAAAuI,IAAAvK,EAAAvJ,KAAAuJ,IAKA,OACAmK,MACAnI,SACAkD,WACAuF,aACArF,SAAA8E,GAAAC,EAAAnI,IAaA0I,CAAArD,GACA,UAAArH,KAAA+B,EAAAoI,IACAnK,EAAAoE,iBACApE,EAAAoE,SAAArC,UAZA,WAAAC,OAAgCA,EAAAyI,eAChC,UAAAE,KAAAF,EAAA,CACA,MAAAhU,KAAWA,GAAOkU,EAClBrS,eAAiC7B,eAAkBuL,EAAAjL,IAAAN,GAAA8B,+BACzC+B,EAAKqQ,EAAAxS,OAAAwS,EAAAtQ,OAAA5D,KAAAkU,EAAArS,IAWfsS,CAAA7I,GAcO,SAAAqC,GAAAiD,GACP,UAAAmD,IAXAF,EAWAjD,EAVAiD,EAAAO,KACAP,EAAAO,OAEA,GAAAzP,UAAAkP,MAJA,IAAAA,ECzEAtU,EAAAQ,EAAAsU,EAAA,0BAAA1J,IAAApL,EAAAQ,EAAAsU,EAAA,0BAAA1D,KAAApR,EAAAQ,EAAAsU,EAAA,6BAAA1G","file":"webidl2.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine([], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"WebIDL2\"] = factory();\n\telse\n\t\troot[\"WebIDL2\"] = factory();\n})(this, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose 
the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 0);\n","/**\n * @param {string} text\n */\nfunction lastLine(text) {\n const splitted = text.split(\"\\n\");\n return splitted[splitted.length - 1];\n}\n\n/**\n * @param {string} message error message\n * @param {\"Syntax\" | \"Validation\"} type error type\n */\nfunction error(source, position, current, message, type) {\n /**\n * @param {number} count\n */\n function sliceTokens(count) {\n return count > 0 ?\n source.slice(position, position + count) :\n source.slice(Math.max(position + count, 0), position);\n }\n\n function tokensToText(inputs, { precedes } = {}) {\n const text = inputs.map(t => t.trivia + t.value).join(\"\");\n const nextToken = source[position];\n if (nextToken.type === \"eof\") {\n return text;\n }\n if (precedes) {\n return text + nextToken.trivia;\n }\n return text.slice(nextToken.trivia.length);\n }\n\n const maxTokens = 5; // arbitrary but works well enough\n const line =\n source[position].type !== \"eof\" ? source[position].line :\n source.length > 1 ? 
source[position - 1].line :\n 1;\n\n const precedingLastLine = lastLine(\n tokensToText(sliceTokens(-maxTokens), { precedes: true })\n );\n\n const subsequentTokens = sliceTokens(maxTokens);\n const subsequentText = tokensToText(subsequentTokens);\n const subsequentFirstLine = subsequentText.split(\"\\n\")[0];\n\n const spaced = \" \".repeat(precedingLastLine.length) + \"^\";\n const sourceContext = precedingLastLine + subsequentFirstLine + \"\\n\" + spaced;\n\n const contextType = type === \"Syntax\" ? \"since\" : \"inside\";\n const inSourceName = source.name ? ` in ${source.name}` : \"\";\n const grammaticalContext = current ? `, ${contextType} \\`${current.partial ? \"partial \" : \"\"}${current.type} ${current.name}\\`` : \"\";\n const context = `${type} error at line ${line}${inSourceName}${grammaticalContext}:\\n${sourceContext}`;\n return {\n message: `${context} ${message}`,\n bareMessage: message,\n context,\n line,\n sourceName: source.name,\n input: subsequentText,\n tokens: subsequentTokens\n };\n}\n\n/**\n * @param {string} message error message\n */\nexport function syntaxError(source, position, current, message) {\n return error(source, position, current, message, \"Syntax\");\n}\n\n/**\n * @param {string} message error message\n */\nexport function validationError(source, token, current, message) {\n return error(source, token.index, current, message, \"Validation\");\n}\n","import { syntaxError } from \"./error.js\";\n\n// These regular expressions use the sticky flag so they will only match at\n// the current location (ie. the offset of lastIndex).\nconst tokenRe = {\n // This expression uses a lookahead assertion to catch false matches\n // against integers early.\n \"decimal\": /-?(?=[0-9]*\\.|[0-9]+[eE])(([0-9]+\\.[0-9]*|[0-9]*\\.[0-9]+)([Ee][-+]?[0-9]+)?|[0-9]+[Ee][-+]?[0-9]+)/y,\n \"integer\": /-?(0([Xx][0-9A-Fa-f]+|[0-7]*)|[1-9][0-9]*)/y,\n \"identifier\": /[_-]?[A-Za-z][0-9A-Z_a-z-]*/y,\n \"string\": /\"[^\"]*\"/y,\n \"whitespace\": /[\\t\\n\\r ]+/y,\n \"comment\": /((\\/(\\/.*|\\*([^*]|\\*[^/])*\\*\\/)[\\t\\n\\r ]*)+)/y,\n \"other\": /[^\\t\\n\\r 0-9A-Za-z]/y\n};\n\nexport const stringTypes = [\n \"ByteString\",\n \"DOMString\",\n \"USVString\"\n];\n\nexport const argumentNameKeywords = [\n \"attribute\",\n \"callback\",\n \"const\",\n \"deleter\",\n \"dictionary\",\n \"enum\",\n \"getter\",\n \"includes\",\n \"inherit\",\n \"interface\",\n \"iterable\",\n \"maplike\",\n \"namespace\",\n \"partial\",\n \"required\",\n \"setlike\",\n \"setter\",\n \"static\",\n \"stringifier\",\n \"typedef\",\n \"unrestricted\"\n];\n\nconst nonRegexTerminals = [\n \"-Infinity\",\n \"FrozenArray\",\n \"Infinity\",\n \"NaN\",\n \"Promise\",\n \"boolean\",\n \"byte\",\n \"double\",\n \"false\",\n \"float\",\n \"implements\",\n \"legacyiterable\",\n \"long\",\n \"mixin\",\n \"null\",\n \"octet\",\n \"optional\",\n \"or\",\n \"readonly\",\n \"record\",\n \"sequence\",\n \"short\",\n \"true\",\n \"unsigned\",\n \"void\"\n].concat(argumentNameKeywords, stringTypes);\n\nconst punctuations = [\n \"(\",\n \")\",\n \",\",\n \"...\",\n \":\",\n \";\",\n \"<\",\n \"=\",\n \">\",\n \"?\",\n \"[\",\n \"]\",\n \"{\",\n \"}\"\n];\n\n/**\n * @param {string} str\n */\nfunction tokenise(str) {\n const tokens = [];\n let lastCharIndex = 0;\n let trivia = \"\";\n let line = 1;\n let index = 0;\n while (lastCharIndex < str.length) {\n const nextChar = str.charAt(lastCharIndex);\n let result = -1;\n\n if (/[\\t\\n\\r ]/.test(nextChar)) {\n result = attemptTokenMatch(\"whitespace\", { noFlushTrivia: true 
});\n } else if (nextChar === '/') {\n result = attemptTokenMatch(\"comment\", { noFlushTrivia: true });\n }\n\n if (result !== -1) {\n const currentTrivia = tokens.pop().value;\n line += (currentTrivia.match(/\\n/g) || []).length;\n trivia += currentTrivia;\n index -= 1;\n } else if (/[-0-9.A-Z_a-z]/.test(nextChar)) {\n result = attemptTokenMatch(\"decimal\");\n if (result === -1) {\n result = attemptTokenMatch(\"integer\");\n }\n if (result === -1) {\n result = attemptTokenMatch(\"identifier\");\n const token = tokens[tokens.length - 1];\n if (result !== -1 && nonRegexTerminals.includes(token.value)) {\n token.type = token.value;\n }\n }\n } else if (nextChar === '\"') {\n result = attemptTokenMatch(\"string\");\n }\n\n for (const punctuation of punctuations) {\n if (str.startsWith(punctuation, lastCharIndex)) {\n tokens.push({ type: punctuation, value: punctuation, trivia, line, index });\n trivia = \"\";\n lastCharIndex += punctuation.length;\n result = lastCharIndex;\n break;\n }\n }\n\n // other as the last try\n if (result === -1) {\n result = attemptTokenMatch(\"other\");\n }\n if (result === -1) {\n throw new Error(\"Token stream not progressing\");\n }\n lastCharIndex = result;\n index += 1;\n }\n\n // remaining trivia as eof\n tokens.push({\n type: \"eof\",\n value: \"\",\n trivia\n });\n\n return tokens;\n\n /**\n * @param {keyof tokenRe} type\n * @param {object} [options]\n * @param {boolean} [options.noFlushTrivia]\n */\n function attemptTokenMatch(type, { noFlushTrivia } = {}) {\n const re = tokenRe[type];\n re.lastIndex = lastCharIndex;\n const result = re.exec(str);\n if (result) {\n tokens.push({ type, value: result[0], trivia, line, index });\n if (!noFlushTrivia) {\n trivia = \"\";\n }\n return re.lastIndex;\n }\n return -1;\n }\n}\n\nexport class Tokeniser {\n /**\n * @param {string} idl\n */\n constructor(idl) {\n this.source = tokenise(idl);\n this.position = 0;\n }\n\n /**\n * @param {string} message\n */\n error(message) {\n throw new WebIDLParseError(syntaxError(this.source, this.position, this.current, message));\n }\n\n /**\n * @param {string} type\n */\n probe(type) {\n return this.source.length > this.position && this.source[this.position].type === type;\n }\n\n /**\n * @param {...string} candidates\n */\n consume(...candidates) {\n for (const type of candidates) {\n if (!this.probe(type)) continue;\n const token = this.source[this.position];\n this.position++;\n return token;\n }\n }\n\n /**\n * @param {number} position\n */\n unconsume(position) {\n this.position = position;\n }\n}\n\nclass WebIDLParseError extends Error {\n constructor({ message, bareMessage, context, line, sourceName, input, tokens }) {\n super(message);\n\n this.name = \"WebIDLParseError\"; // not to be mangled\n this.bareMessage = bareMessage;\n this.context = context;\n this.line = line;\n this.sourceName = sourceName;\n this.input = input;\n this.tokens = tokens;\n }\n}\n","export class Base {\n constructor({ source, tokens }) {\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens }\n });\n }\n\n toJSON() {\n const json = { type: undefined, name: undefined, inheritance: undefined };\n let proto = this;\n while (proto !== Object.prototype) {\n const descMap = Object.getOwnPropertyDescriptors(proto);\n for (const [key, value] of Object.entries(descMap)) {\n if (value.enumerable || value.get) {\n json[key] = this[key];\n }\n }\n proto = Object.getPrototypeOf(proto);\n }\n return json;\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape, 
type_with_extended_attributes, return_type, primitive_type } from \"./helpers.js\";\nimport { stringTypes } from \"../tokeniser.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction generic_type(tokeniser, typeName) {\n const base = tokeniser.consume(\"FrozenArray\", \"Promise\", \"sequence\", \"record\");\n if (!base) {\n return;\n }\n const ret = new Type({ source: tokeniser.source, tokens: { base } });\n ret.tokens.open = tokeniser.consume(\"<\") || tokeniser.error(`No opening bracket after ${base.type}`);\n switch (base.type) {\n case \"Promise\": {\n if (tokeniser.probe(\"[\")) tokeniser.error(\"Promise type cannot have extended attribute\");\n const subtype = return_type(tokeniser, typeName) || tokeniser.error(\"Missing Promise subtype\");\n ret.subtype.push(subtype);\n break;\n }\n case \"sequence\":\n case \"FrozenArray\": {\n const subtype = type_with_extended_attributes(tokeniser, typeName) || tokeniser.error(`Missing ${base.type} subtype`);\n ret.subtype.push(subtype);\n break;\n }\n case \"record\": {\n if (tokeniser.probe(\"[\")) tokeniser.error(\"Record key cannot have extended attribute\");\n const keyType = tokeniser.consume(...stringTypes) || tokeniser.error(`Record key must be one of: ${stringTypes.join(\", \")}`);\n const keyIdlType = new Type({ source: tokeniser.source, tokens: { base: keyType }});\n keyIdlType.tokens.separator = tokeniser.consume(\",\") || tokeniser.error(\"Missing comma after record key type\");\n keyIdlType.type = typeName;\n const valueType = type_with_extended_attributes(tokeniser, typeName) || tokeniser.error(\"Error parsing generic type record\");\n ret.subtype.push(keyIdlType, valueType);\n break;\n }\n }\n if (!ret.idlType) tokeniser.error(`Error parsing generic type ${base.type}`);\n ret.tokens.close = tokeniser.consume(\">\") || tokeniser.error(`Missing closing bracket after ${base.type}`);\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction type_suffix(tokeniser, obj) {\n const nullable = tokeniser.consume(\"?\");\n if (nullable) {\n obj.tokens.nullable = nullable;\n }\n if (tokeniser.probe(\"?\")) tokeniser.error(\"Can't nullable more than once\");\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nfunction single_type(tokeniser, typeName) {\n let ret = generic_type(tokeniser, typeName) || primitive_type(tokeniser);\n if (!ret) {\n const base = tokeniser.consume(\"identifier\", ...stringTypes);\n if (!base) {\n return;\n }\n ret = new Type({ source: tokeniser.source, tokens: { base } });\n if (tokeniser.probe(\"<\")) tokeniser.error(`Unsupported generic type ${base.value}`);\n }\n if (ret.generic === \"Promise\" && tokeniser.probe(\"?\")) {\n tokeniser.error(\"Promise type cannot be nullable\");\n }\n ret.type = typeName || null;\n type_suffix(tokeniser, ret);\n if (ret.nullable && ret.idlType === \"any\") tokeniser.error(\"Type `any` cannot be made nullable\");\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} type\n */\nfunction union_type(tokeniser, type) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"(\");\n if (!tokens.open) return;\n const ret = new Type({ source: tokeniser.source, tokens });\n ret.type = type || null;\n while (true) {\n const typ = type_with_extended_attributes(tokeniser) || tokeniser.error(\"No type after open parenthesis or 'or' in union type\");\n if (typ.idlType === \"any\") 
tokeniser.error(\"Type `any` cannot be included in a union type\");\n ret.subtype.push(typ);\n const or = tokeniser.consume(\"or\");\n if (or) {\n typ.tokens.separator = or;\n }\n else break;\n }\n if (ret.idlType.length < 2) {\n tokeniser.error(\"At least two types are expected in a union type but found less\");\n }\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unterminated union type\");\n type_suffix(tokeniser, ret);\n return ret;\n}\n\nexport class Type extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\n static parse(tokeniser, typeName) {\n return single_type(tokeniser, typeName) || union_type(tokeniser, typeName);\n }\n\n constructor({ source, tokens }) {\n super({ source, tokens });\n Object.defineProperty(this, \"subtype\", { value: [] });\n this.extAttrs = [];\n }\n\n get generic() {\n if (this.subtype.length && this.tokens.base) {\n return this.tokens.base.value;\n }\n return \"\";\n }\n get nullable() {\n return Boolean(this.tokens.nullable);\n }\n get union() {\n return Boolean(this.subtype.length) && !this.tokens.base;\n }\n get idlType() {\n if (this.subtype.length) {\n return this.subtype;\n }\n // Adding prefixes/postfixes for \"unrestricted float\", etc.\n const name = [\n this.tokens.prefix,\n this.tokens.base,\n this.tokens.postfix\n ].filter(t => t).map(t => t.value).join(\" \");\n return unescape(name);\n }\n}\n","import { Base } from \"./base.js\";\nimport { const_data, const_value } from \"./helpers.js\";\n\nexport class Default extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const assign = tokeniser.consume(\"=\");\n if (!assign) {\n return null;\n }\n const def = const_value(tokeniser) || tokeniser.consume(\"string\", \"null\", \"[\", \"{\") || tokeniser.error(\"No value for default\");\n const expression = [def];\n if (def.type === \"[\") {\n const close = tokeniser.consume(\"]\") || tokeniser.error(\"Default sequence value must be empty\");\n expression.push(close);\n } else if (def.type === \"{\") {\n const close = tokeniser.consume(\"}\") || tokeniser.error(\"Default dictionary value must be empty\");\n expression.push(close);\n }\n return new Default({ source: tokeniser.source, tokens: { assign }, expression });\n }\n\n constructor({ source, tokens, expression }) {\n super({ source, tokens });\n Object.defineProperty(this, \"expression\", { value: expression });\n }\n\n get type() {\n return const_data(this.expression[0]).type;\n }\n get value() {\n return const_data(this.expression[0]).value;\n }\n get negative() {\n return const_data(this.expression[0]).negative;\n }\n}\n","export class ArrayBase extends Array {\n constructor({ source, tokens }) {\n super();\n Object.defineProperties(this, {\n source: { value: source },\n tokens: { value: tokens }\n });\n }\n}\n","import { Base } from \"./base.js\";\nimport { ArrayBase } from \"./array-base.js\";\nimport { list, identifiers, argument_list } from \"./helpers.js\";\n\nclass ExtendedAttributeParameters extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = { assign: tokeniser.consume(\"=\") };\n const ret = new ExtendedAttributeParameters({ source: tokeniser.source, tokens });\n if (tokens.assign) {\n tokens.secondaryName = tokeniser.consume(\"identifier\", \"decimal\", \"integer\", \"string\");\n }\n tokens.open = tokeniser.consume(\"(\");\n if (tokens.open) {\n ret.list = ret.rhsType === 
\"identifier-list\" ?\n // [Exposed=(Window,Worker)]\n identifiers(tokeniser) :\n // [NamedConstructor=Audio(DOMString src)] or [Constructor(DOMString str)]\n argument_list(tokeniser);\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unexpected token in extended attribute argument list\");\n } else if (ret.hasRhs && !tokens.secondaryName) {\n tokeniser.error(\"No right hand side to extended attribute assignment\");\n }\n return ret;\n }\n\n get rhsType() {\n return !this.tokens.assign ? null :\n !this.tokens.secondaryName ? \"identifier-list\" :\n this.tokens.secondaryName.type;\n }\n}\n\nclass SimpleExtendedAttribute extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const name = tokeniser.consume(\"identifier\");\n if (name) {\n return new SimpleExtendedAttribute({\n tokens: { name },\n params: ExtendedAttributeParameters.parse(tokeniser)\n });\n }\n }\n\n constructor({ source, tokens, params }) {\n super({ source, tokens });\n Object.defineProperty(this, \"params\", { value: params });\n }\n\n get type() {\n return \"extended-attribute\";\n }\n get name() {\n return this.tokens.name.value;\n }\n get rhs() {\n const { rhsType: type, tokens, list } = this.params;\n if (!type) {\n return null;\n }\n const value = type === \"identifier-list\" ? list : tokens.secondaryName.value;\n return { type, value };\n }\n get arguments() {\n const { rhsType, list } = this.params;\n if (!list || rhsType === \"identifier-list\") {\n return [];\n }\n return list;\n }\n}\n\n// Note: we parse something simpler than the official syntax. It's all that ever\n// seems to be used\nexport class ExtendedAttributes extends ArrayBase {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.open = tokeniser.consume(\"[\");\n if (!tokens.open) return [];\n const ret = new ExtendedAttributes({ source: tokeniser.source, tokens });\n ret.push(...list(tokeniser, {\n parser: SimpleExtendedAttribute.parse,\n listName: \"extended attribute\"\n }));\n tokens.close = tokeniser.consume(\"]\") || tokeniser.error(\"Unexpected closing token of extended attribute\");\n if (!ret.length) {\n tokeniser.error(\"Found an empty extended attribute\");\n }\n if (tokeniser.probe(\"[\")) {\n tokeniser.error(\"Illegal double extended attribute lists, consider merging them\");\n }\n return ret;\n }\n}\n","import { Type } from \"./type.js\";\nimport { Argument } from \"./argument.js\";\nimport { Token } from \"./token.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { Operation } from \"./operation.js\";\nimport { Attribute } from \"./attribute.js\";\n\n/**\n * @param {string} identifier\n */\nexport function unescape(identifier) {\n return identifier.startsWith('_') ? 
identifier.slice(1) : identifier;\n}\n\n/**\n * Parses comma-separated list\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {object} args\n * @param {Function} args.parser parser function for each item\n * @param {boolean} [args.allowDangler] whether to allow dangling comma\n * @param {string} [args.listName] the name to be shown on error messages\n */\nexport function list(tokeniser, { parser, allowDangler, listName = \"list\" }) {\n const first = parser(tokeniser);\n if (!first) {\n return [];\n }\n first.tokens.separator = tokeniser.consume(\",\");\n const items = [first];\n while (first.tokens.separator) {\n const item = parser(tokeniser);\n if (!item) {\n if (!allowDangler) {\n tokeniser.error(`Trailing comma in ${listName}`);\n }\n break;\n }\n item.tokens.separator = tokeniser.consume(\",\");\n items.push(item);\n if (!item.tokens.separator) break;\n }\n return items;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function const_value(tokeniser) {\n return tokeniser.consume(\"true\", \"false\", \"Infinity\", \"-Infinity\", \"NaN\", \"decimal\", \"integer\");\n}\n\n/**\n * @param {object} token\n * @param {string} token.type\n * @param {string} token.value\n */\nexport function const_data({ type, value }) {\n switch (type) {\n case \"true\":\n case \"false\":\n return { type: \"boolean\", value: type === \"true\" };\n case \"Infinity\":\n case \"-Infinity\":\n return { type: \"Infinity\", negative: type.startsWith(\"-\") };\n case \"[\":\n return { type: \"sequence\", value: [] };\n case \"{\":\n return { type: \"dictionary\" };\n case \"decimal\":\n case \"integer\":\n return { type: \"number\", value };\n case \"string\":\n return { type: \"string\", value: value.slice(1, -1) };\n default:\n return { type };\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function primitive_type(tokeniser) {\n function integer_type() {\n const prefix = tokeniser.consume(\"unsigned\");\n const base = tokeniser.consume(\"short\", \"long\");\n if (base) {\n const postfix = tokeniser.consume(\"long\");\n return new Type({ source, tokens: { prefix, base, postfix } });\n }\n if (prefix) tokeniser.error(\"Failed to parse integer type\");\n }\n\n function decimal_type() {\n const prefix = tokeniser.consume(\"unrestricted\");\n const base = tokeniser.consume(\"float\", \"double\");\n if (base) {\n return new Type({ source, tokens: { prefix, base } });\n }\n if (prefix) tokeniser.error(\"Failed to parse float type\");\n }\n\n const { source } = tokeniser;\n const num_type = integer_type(tokeniser) || decimal_type(tokeniser);\n if (num_type) return num_type;\n const base = tokeniser.consume(\"boolean\", \"byte\", \"octet\");\n if (base) {\n return new Type({ source, tokens: { base } });\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function identifiers(tokeniser) {\n const ids = list(tokeniser, { parser: Token.parser(tokeniser, \"identifier\"), listName: \"identifier list\" });\n if (!ids.length) {\n tokeniser.error(\"Expected identifiers but none found\");\n }\n return ids;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function argument_list(tokeniser) {\n return list(tokeniser, { parser: Argument.parse, listName: \"arguments list\" });\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nexport function type_with_extended_attributes(tokeniser, typeName) {\n const extAttrs = 
ExtendedAttributes.parse(tokeniser);\n const ret = Type.parse(tokeniser, typeName);\n if (ret) ret.extAttrs = extAttrs;\n return ret;\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} typeName\n */\nexport function return_type(tokeniser, typeName) {\n const typ = Type.parse(tokeniser, typeName || \"return-type\");\n if (typ) {\n return typ;\n }\n const voidToken = tokeniser.consume(\"void\");\n if (voidToken) {\n const ret = new Type({ source: tokeniser.source, tokens: { base: voidToken } });\n ret.type = \"return-type\";\n return ret;\n }\n}\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nexport function stringifier(tokeniser) {\n const special = tokeniser.consume(\"stringifier\");\n if (!special) return;\n const member = Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"Unterminated stringifier\");\n return member;\n}\n","import { Base } from \"./base.js\";\nimport { Default } from \"./default.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { unescape, type_with_extended_attributes } from \"./helpers.js\";\nimport { argumentNameKeywords } from \"../tokeniser.js\";\n\nexport class Argument extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n const tokens = {};\n const ret = new Argument({ source: tokeniser.source, tokens });\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.optional = tokeniser.consume(\"optional\");\n ret.idlType = type_with_extended_attributes(tokeniser, \"argument-type\");\n if (!ret.idlType) {\n return tokeniser.unconsume(start_position);\n }\n if (!tokens.optional) {\n tokens.variadic = tokeniser.consume(\"...\");\n }\n tokens.name = tokeniser.consume(\"identifier\", ...argumentNameKeywords);\n if (!tokens.name) {\n return tokeniser.unconsume(start_position);\n }\n ret.default = tokens.optional ? 
Default.parse(tokeniser) : null;\n return ret;\n }\n\n get optional() {\n return !!this.tokens.optional;\n }\n get variadic() {\n return !!this.tokens.variadic;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base.js\";\n\nexport class Token extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} type\n */\n static parser(tokeniser, type) {\n return () => {\n const value = tokeniser.consume(type);\n if (value) {\n return new Token({ source: tokeniser.source, tokens: { value } });\n }\n };\n }\n\n get value() {\n return this.tokens.value.value;\n }\n}\n","export function idlTypeIncludesDictionary(idlType, defs) {\n if (!idlType.union) {\n const def = defs.unique.get(idlType.idlType);\n if (!def) {\n return false;\n }\n if (def.type === \"typedef\") {\n return idlTypeIncludesDictionary(def.idlType, defs);\n }\n return def.type === \"dictionary\";\n }\n for (const subtype of idlType.subtype) {\n if (idlTypeIncludesDictionary(subtype, defs)) {\n return true;\n }\n }\n return false;\n}\n","import { Base } from \"./base.js\";\nimport { return_type, argument_list, unescape } from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\nimport { idlTypeIncludesDictionary } from \"../validators/helpers.js\";\n\nexport class Operation extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { special, regular } = {}) {\n const tokens = { special };\n const ret = new Operation({ source: tokeniser.source, tokens });\n if (special && special.value === \"stringifier\") {\n tokens.termination = tokeniser.consume(\";\");\n if (tokens.termination) {\n ret.arguments = [];\n return ret;\n }\n }\n if (!special && !regular) {\n tokens.special = tokeniser.consume(\"getter\", \"setter\", \"deleter\");\n }\n ret.idlType = return_type(tokeniser) || tokeniser.error(\"Missing return type\");\n tokens.name = tokeniser.consume(\"identifier\");\n tokens.open = tokeniser.consume(\"(\") || tokeniser.error(\"Invalid operation\");\n ret.arguments = argument_list(tokeniser);\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unterminated operation\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated operation, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"operation\";\n }\n get name() {\n const { name } = this.tokens;\n if (!name) {\n return \"\";\n }\n return unescape(name.value);\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n\n *validate(defs) {\n for (const argument of this.arguments) {\n if (idlTypeIncludesDictionary(argument.idlType, defs)) {\n if (!argument.default) {\n const message = `Optional dictionary arguments must have a default value of \\`{}\\`.`;\n yield validationError(this.source, argument.tokens.name, this, message);\n }\n }\n }\n }\n}\n","import { Base } from \"./base.js\";\nimport { type_with_extended_attributes, unescape } from \"./helpers.js\";\n\nexport class Attribute extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { special, noInherit = false, readonly = false } = {}) {\n const start_position = tokeniser.position;\n const tokens = { special };\n const ret = new Attribute({ source: tokeniser.source, tokens });\n if (!special && !noInherit) {\n tokens.special = tokeniser.consume(\"inherit\");\n }\n if (ret.special === \"inherit\" && 
tokeniser.probe(\"readonly\")) {\n tokeniser.error(\"Inherited attributes cannot be read-only\");\n }\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (readonly && !tokens.readonly && tokeniser.probe(\"attribute\")) {\n tokeniser.error(\"Attributes must be readonly in this context\");\n }\n tokens.base = tokeniser.consume(\"attribute\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n ret.idlType = type_with_extended_attributes(tokeniser, \"attribute-type\") || tokeniser.error(\"Attribute lacks a type\");\n switch (ret.idlType.generic) {\n case \"sequence\":\n case \"record\": tokeniser.error(`Attributes cannot accept ${ret.idlType.generic} types`);\n }\n tokens.name = tokeniser.consume(\"identifier\", \"required\") || tokeniser.error(\"Attribute lacks a name\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated attribute, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"attribute\";\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { list, unescape } from \"./helpers.js\";\nimport { Token } from \"./token.js\";\nimport { Base } from \"./base.js\";\n\nclass EnumValue extends Token {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consume(\"string\");\n if (value) {\n return new EnumValue({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"enum-value\";\n }\n get value() {\n return super.value.slice(1, -1);\n }\n}\n\nexport class Enum extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.base = tokeniser.consume(\"enum\");\n if (!tokens.base) {\n return;\n }\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"No name for enum\");\n const ret = tokeniser.current = new Enum({ source: tokeniser.source, tokens });\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(\"Bodyless enum\");\n ret.values = list(tokeniser, {\n parser: EnumValue.parse,\n allowDangler: true,\n listName: \"enumeration\"\n });\n if (tokeniser.probe(\"string\")) {\n tokeniser.error(\"No comma between enum values\");\n }\n tokens.close = tokeniser.consume(\"}\") || tokeniser.error(\"Unexpected value in enum\");\n if (!ret.values.length) {\n tokeniser.error(\"No value in enum\");\n }\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"No semicolon after enum\");\n return ret;\n }\n\n get type() {\n return \"enum\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class Includes extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const target = tokeniser.consume(\"identifier\");\n if (!target) {\n return;\n }\n const tokens = { target };\n tokens.includes = tokeniser.consume(\"includes\");\n if (!tokens.includes) {\n tokeniser.unconsume(target.index);\n return;\n }\n tokens.mixin = tokeniser.consume(\"identifier\") || tokeniser.error(\"Incomplete includes statement\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"No terminating ; for includes statement\");\n return new Includes({ source: tokeniser.source, tokens });\n 
}\n\n get type() {\n return \"includes\";\n }\n get target() {\n return unescape(this.tokens.target.value);\n }\n get includes() {\n return unescape(this.tokens.mixin.value);\n }\n}\n","import { Base } from \"./base\";\nimport { type_with_extended_attributes, unescape } from \"./helpers\";\n\nexport class Typedef extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n const ret = new Typedef({ source: tokeniser.source, tokens });\n tokens.base = tokeniser.consume(\"typedef\");\n if (!tokens.base) {\n return;\n }\n ret.idlType = type_with_extended_attributes(tokeniser, \"typedef-type\") || tokeniser.error(\"Typedef lacks a type\");\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Typedef lacks a name\");\n tokeniser.current = ret;\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated typedef, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"typedef\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base\";\nimport { return_type, argument_list, unescape } from \"./helpers\";\n\nexport class CallbackFunction extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base) {\n const tokens = { base };\n const ret = new CallbackFunction({ source: tokeniser.source, tokens });\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Callback lacks a name\");\n tokeniser.current = ret;\n tokens.assign = tokeniser.consume(\"=\") || tokeniser.error(\"Callback lacks an assignment\");\n ret.idlType = return_type(tokeniser) || tokeniser.error(\"Callback lacks a return type\");\n tokens.open = tokeniser.consume(\"(\") || tokeniser.error(\"Callback lacks parentheses for arguments\");\n ret.arguments = argument_list(tokeniser);\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unterminated callback\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated callback, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"callback\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { unescape } from \"./helpers.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction inheritance(tokeniser) {\n const colon = tokeniser.consume(\":\");\n if (!colon) {\n return {};\n }\n const inheritance = tokeniser.consume(\"identifier\") || tokeniser.error(\"Inheritance lacks a type\");\n return { colon, inheritance };\n}\n\nexport class Container extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {*} instance\n * @param {*} args\n */\n static parse(tokeniser, instance, { type, inheritable, allowedMembers }) {\n const { tokens } = instance;\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(`Missing name in ${instance.type}`);\n tokeniser.current = instance;\n if (inheritable) {\n Object.assign(tokens, inheritance(tokeniser));\n }\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(`Bodyless ${type}`);\n instance.members = [];\n while (true) {\n tokens.close = tokeniser.consume(\"}\");\n if (tokens.close) {\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(`Missing semicolon after ${type}`);\n return instance;\n }\n const ea = ExtendedAttributes.parse(tokeniser);\n let mem;\n 
for (const [parser, ...args] of allowedMembers) {\n mem = parser(tokeniser, ...args);\n if (mem) {\n break;\n }\n }\n if (!mem) {\n tokeniser.error(\"Unknown member\");\n }\n mem.extAttrs = ea;\n instance.members.push(mem);\n }\n }\n\n get partial() {\n return !!this.tokens.partial;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get inheritance() {\n if (!this.tokens.inheritance) {\n return null;\n }\n return unescape(this.tokens.inheritance.value);\n }\n\n *validate(defs) {\n for (const member of this.members) {\n if (member.validate) {\n yield* member.validate(defs);\n }\n }\n }\n }\n","import { Base } from \"./base.js\";\nimport { Type } from \"./type.js\";\nimport { const_data, const_value, primitive_type } from \"./helpers.js\";\n\nexport class Constant extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.base = tokeniser.consume(\"const\");\n if (!tokens.base) {\n return;\n }\n let idlType = primitive_type(tokeniser);\n if (!idlType) {\n const base = tokeniser.consume(\"identifier\") || tokeniser.error(\"Const lacks a type\");\n idlType = new Type({ source: tokeniser.source, tokens: { base } });\n }\n if (tokeniser.probe(\"?\")) {\n tokeniser.error(\"Unexpected nullable constant type\");\n }\n idlType.type = \"const-type\";\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Const lacks a name\");\n tokens.assign = tokeniser.consume(\"=\") || tokeniser.error(\"Const lacks value assignment\");\n tokens.value = const_value(tokeniser) || tokeniser.error(\"Const lacks a value\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated const, expected `;`\");\n const ret = new Constant({ source: tokeniser.source, tokens });\n ret.idlType = idlType;\n return ret;\n }\n\n get type() {\n return \"const\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get value() {\n return const_data(this.tokens.value);\n }\n}\n","import { Base } from \"./base\";\nimport { type_with_extended_attributes } from \"./helpers\";\n\nexport class IterableLike extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n const tokens = {};\n const ret = new IterableLike({ source: tokeniser.source, tokens });\n tokens.readonly = tokeniser.consume(\"readonly\");\n tokens.base = tokens.readonly ?\n tokeniser.consume(\"maplike\", \"setlike\") :\n tokeniser.consume(\"iterable\", \"maplike\", \"setlike\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n\n const { type } = ret;\n const secondTypeRequired = type === \"maplike\";\n const secondTypeAllowed = secondTypeRequired || type === \"iterable\";\n\n tokens.open = tokeniser.consume(\"<\") || tokeniser.error(`Missing less-than sign \\`<\\` in ${type} declaration`);\n const first = type_with_extended_attributes(tokeniser) || tokeniser.error(`Missing a type argument in ${type} declaration`);\n ret.idlType = [first];\n if (secondTypeAllowed) {\n first.tokens.separator = tokeniser.consume(\",\");\n if (first.tokens.separator) {\n ret.idlType.push(type_with_extended_attributes(tokeniser));\n }\n else if (secondTypeRequired)\n tokeniser.error(`Missing second type argument in ${type} declaration`);\n }\n tokens.close = tokeniser.consume(\">\") || tokeniser.error(`Missing greater-than sign \\`>\\` in ${type} declaration`);\n tokens.termination = tokeniser.consume(\";\") || 
tokeniser.error(`Missing semicolon after ${type} declaration`);\n\n return ret;\n }\n\n get type() {\n return this.tokens.base.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\nimport { IterableLike } from \"./iterable.js\";\nimport { stringifier } from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\nimport { checkInterfaceMemberDuplication } from \"../validators/interface.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction static_member(tokeniser) {\n const special = tokeniser.consume(\"static\");\n if (!special) return;\n const member = Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"No body in static member\");\n return member;\n}\n\nexport class Interface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial = null } = {}) {\n const tokens = { partial, base };\n return Container.parse(tokeniser, new Interface({ source: tokeniser.source, tokens }), {\n type: \"interface\",\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [static_member],\n [stringifier],\n [IterableLike.parse],\n [Attribute.parse],\n [Operation.parse]\n ]\n });\n }\n\n get type() {\n return \"interface\";\n }\n\n *validate(defs) {\n if (!this.partial && this.extAttrs.every(extAttr => extAttr.name !== \"Exposed\")) {\n const message = `Interfaces must have \\`[Exposed]\\` extended attribute. \\\nTo fix, add, for example, \\`[Exposed=Window]\\`. Please also consider carefully \\\nif your interface should also be exposed in a Worker scope. 
Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(this.source, this.tokens.name, this, message);\n }\n yield* super.validate(defs);\n if (!this.partial) {\n yield* checkInterfaceMemberDuplication(defs, this);\n }\n }\n}\n","import { validationError } from \"../error.js\";\n\nexport function* checkInterfaceMemberDuplication(defs, i) {\n const opNames = new Set(getOperations(i).map(op => op.name));\n const partials = defs.partials.get(i.name) || [];\n const mixins = defs.mixinMap.get(i.name) || [];\n for (const ext of [...partials, ...mixins]) {\n const additions = getOperations(ext);\n yield* forEachExtension(additions, opNames, ext, i);\n for (const addition of additions) {\n opNames.add(addition.name);\n }\n }\n\n function* forEachExtension(additions, existings, ext, base) {\n for (const addition of additions) {\n const { name } = addition;\n if (name && existings.has(name)) {\n const message = `The operation \"${name}\" has already been defined for the base interface \"${base.name}\" either in itself or in a mixin`;\n yield validationError(ext.source, addition.tokens.name, ext, message);\n }\n }\n }\n\n function getOperations(i) {\n return i.members\n .filter(({type}) => type === \"operation\");\n }\n}\n","import { Container } from \"./container.js\";\nimport { Constant } from \"./constant.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { stringifier } from \"./helpers.js\";\n\nexport class Mixin extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial } = {}) {\n const tokens = { partial, base };\n tokens.mixin = tokeniser.consume(\"mixin\");\n if (!tokens.mixin) {\n return;\n }\n return Container.parse(tokeniser, new Mixin({ source: tokeniser.source, tokens }), {\n type: \"interface mixin\",\n allowedMembers: [\n [Constant.parse],\n [stringifier],\n [Attribute.parse, { noInherit: true }],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"interface mixin\";\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape, type_with_extended_attributes } from \"./helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { Default } from \"./default.js\";\n\nexport class Field extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n const ret = new Field({ source: tokeniser.source, tokens });\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.required = tokeniser.consume(\"required\");\n ret.idlType = type_with_extended_attributes(tokeniser, \"dictionary-type\") || tokeniser.error(\"Dictionary member lacks a type\");\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Dictionary member lacks a name\");\n ret.default = Default.parse(tokeniser);\n if (tokens.required && ret.default) tokeniser.error(\"Required member must not have a default\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated dictionary member, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"field\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get required() {\n return !!this.tokens.required;\n }\n}\n","import { Container } from \"./container.js\";\nimport { Field } from \"./field.js\";\n\nexport class Dictionary extends Container {\n /**\n * @param 
{import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"dictionary\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new Dictionary({ source: tokeniser.source, tokens }), {\n type: \"dictionary\",\n inheritable: !partial,\n allowedMembers: [\n [Field.parse],\n ]\n });\n }\n\n get type() {\n return \"dictionary\";\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { validationError } from \"../error.js\";\n\nexport class Namespace extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"namespace\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new Namespace({ source: tokeniser.source, tokens }), {\n type: \"namespace\",\n allowedMembers: [\n [Attribute.parse, { noInherit: true, readonly: true }],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"namespace\";\n }\n\n *validate(defs) {\n if (!this.partial && this.extAttrs.every(extAttr => extAttr.name !== \"Exposed\")) {\n const message = `Namespaces must have [Exposed] extended attribute. \\\nTo fix, add, for example, [Exposed=Window]. Please also consider carefully \\\nif your namespace should also be exposed in a Worker scope. Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(this.source, this.tokens.name, this, message);\n }\n yield* super.validate(defs);\n }\n}\n","import { Container } from \"./container.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\n\n\nexport class CallbackInterface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, callback, { partial = null } = {}) {\n const tokens = { callback };\n tokens.base = tokeniser.consume(\"interface\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new CallbackInterface({ source: tokeniser.source, tokens }), {\n type: \"callback interface\",\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"callback interface\";\n }\n}\n","\"use strict\";\n\nimport { Tokeniser } from \"./tokeniser.js\";\nimport { Enum } from \"./productions/enum.js\";\nimport { Includes } from \"./productions/includes.js\";\nimport { ExtendedAttributes } from \"./productions/extended-attributes.js\";\nimport { Typedef } from \"./productions/typedef.js\";\nimport { CallbackFunction } from \"./productions/callback.js\";\nimport { Interface } from \"./productions/interface.js\";\nimport { Mixin } from \"./productions/mixin.js\";\nimport { Dictionary } from \"./productions/dictionary.js\";\nimport { Namespace } from \"./productions/namespace.js\";\nimport { CallbackInterface } from \"./productions/callback-interface.js\";\n\n/**\n * @param {Tokeniser} tokeniser\n * @param {object} options\n * @param {boolean} [options.concrete]\n */\nfunction parseByTokens(tokeniser, options) {\n const source = tokeniser.source;\n\n function error(str) {\n tokeniser.error(str);\n }\n\n function consume(...candidates) {\n return tokeniser.consume(...candidates);\n }\n\n function 
callback() {\n const callback = consume(\"callback\");\n if (!callback) return;\n if (tokeniser.probe(\"interface\")) {\n return CallbackInterface.parse(tokeniser, callback);\n }\n return CallbackFunction.parse(tokeniser, callback);\n }\n\n function interface_(opts) {\n const base = consume(\"interface\");\n if (!base) return;\n const ret = Mixin.parse(tokeniser, base, opts) ||\n Interface.parse(tokeniser, base, opts) ||\n error(\"Interface has no proper body\");\n return ret;\n }\n\n function partial() {\n const partial = consume(\"partial\");\n if (!partial) return;\n return Dictionary.parse(tokeniser, { partial }) ||\n interface_({ partial }) ||\n Namespace.parse(tokeniser, { partial }) ||\n error(\"Partial doesn't apply to anything\");\n }\n\n function definition() {\n return callback() ||\n interface_() ||\n partial() ||\n Dictionary.parse(tokeniser) ||\n Enum.parse(tokeniser) ||\n Typedef.parse(tokeniser) ||\n Includes.parse(tokeniser) ||\n Namespace.parse(tokeniser);\n }\n\n function definitions() {\n if (!source.length) return [];\n const defs = [];\n while (true) {\n const ea = ExtendedAttributes.parse(tokeniser);\n const def = definition();\n if (!def) {\n if (ea.length) error(\"Stray extended attributes\");\n break;\n }\n def.extAttrs = ea;\n defs.push(def);\n }\n const eof = consume(\"eof\");\n if (options.concrete) {\n defs.push(eof);\n }\n return defs;\n }\n const res = definitions();\n if (tokeniser.position < source.length) error(\"Unrecognised tokens\");\n return res;\n}\n\nexport function parse(str, options = {}) {\n const tokeniser = new Tokeniser(str);\n if (typeof options.sourceName !== \"undefined\") {\n tokeniser.source.name = options.sourceName;\n }\n return parseByTokens(tokeniser, options);\n}\n","\"use strict\";\n\nfunction noop(arg) {\n return arg;\n}\n\nconst templates = {\n wrap: items => items.join(\"\"),\n trivia: noop,\n name: noop,\n reference: noop,\n type: noop,\n generic: noop,\n inheritance: noop,\n definition: noop,\n extendedAttribute: noop,\n extendedAttributeReference: noop\n};\n\nexport function write(ast, { templates: ts = templates } = {}) {\n ts = Object.assign({}, templates, ts);\n\n function reference(raw, { unescaped, context }) {\n if (!unescaped) {\n unescaped = raw.startsWith(\"_\") ? raw.slice(1) : raw;\n }\n return ts.reference(raw, unescaped, context);\n }\n\n function token(t, wrapper = noop, ...args) {\n if (!t) {\n return \"\";\n }\n const value = wrapper(t.value, ...args);\n return ts.wrap([ts.trivia(t.trivia), value]);\n }\n\n function reference_token(t, context) {\n return token(t, reference, { context });\n }\n\n function name_token(t, arg) {\n return token(t, ts.name, arg);\n }\n\n function type_body(it) {\n if (it.union || it.generic) {\n return ts.wrap([\n token(it.tokens.base, ts.generic),\n token(it.tokens.open),\n ...it.subtype.map(type),\n token(it.tokens.close)\n ]);\n }\n const firstToken = it.tokens.prefix || it.tokens.base;\n const prefix = it.tokens.prefix ? 
[\n it.tokens.prefix.value,\n ts.trivia(it.tokens.base.trivia)\n ] : [];\n const ref = reference(ts.wrap([\n ...prefix,\n it.tokens.base.value,\n token(it.tokens.postfix)\n ]), { unescaped: it.idlType, context: it });\n return ts.wrap([ts.trivia(firstToken.trivia), ref]);\n }\n function type(it) {\n return ts.wrap([\n extended_attributes(it.extAttrs),\n type_body(it),\n token(it.tokens.nullable),\n token(it.tokens.separator)\n ]);\n }\n function default_(def) {\n if (!def) {\n return \"\";\n }\n return ts.wrap([\n token(def.tokens.assign),\n ...def.expression.map(t => token(t))\n ]);\n }\n function argument(arg) {\n return ts.wrap([\n extended_attributes(arg.extAttrs),\n token(arg.tokens.optional),\n ts.type(type(arg.idlType)),\n token(arg.tokens.variadic),\n name_token(arg.tokens.name, { data: arg }),\n default_(arg.default),\n token(arg.tokens.separator)\n ]);\n }\n function identifier(id, context) {\n return ts.wrap([\n reference_token(id.tokens.value, context),\n token(id.tokens.separator)\n ]);\n }\n function make_ext_at(it) {\n const { rhsType } = it.params;\n return ts.wrap([\n ts.trivia(it.tokens.name.trivia),\n ts.extendedAttribute(ts.wrap([\n ts.extendedAttributeReference(it.name),\n token(it.params.tokens.assign),\n reference_token(it.params.tokens.secondaryName, it),\n token(it.params.tokens.open),\n ...!it.params.list ? [] :\n it.params.list.map(\n rhsType === \"identifier-list\" ? id => identifier(id, it) : argument\n ),\n token(it.params.tokens.close)\n ])),\n token(it.tokens.separator)\n ]);\n }\n function extended_attributes(eats) {\n if (!eats.length) return \"\";\n return ts.wrap([\n token(eats.tokens.open),\n ...eats.map(make_ext_at),\n token(eats.tokens.close)\n ]);\n }\n\n function operation(it, parent) {\n const body = it.idlType ? 
[generated-file diff omitted: the rebuilt webpack source map for the bundled webidl2.js — minified mappings plus a "sourcesContent" array that embeds the lib/*.js sources verbatim, duplicating the source changes above]
Default.parse(tokeniser) : null;\n return ret;\n }\n\n get type() {\n return \"argument\";\n }\n get optional() {\n return !!this.tokens.optional;\n }\n get variadic() {\n return !!this.tokens.variadic;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n\n *validate(defs) {\n if (idlTypeIncludesDictionary(this.idlType, defs)) {\n if (this.optional && !this.default) {\n const message = `Optional dictionary arguments must have a default value of \\`{}\\`.`;\n yield validationError(this.source, this.tokens.name, this, message);\n }\n }\n }\n}\n","export function idlTypeIncludesDictionary(idlType, defs) {\n if (!idlType.union) {\n const def = defs.unique.get(idlType.idlType);\n if (!def) {\n return false;\n }\n if (def.type === \"typedef\") {\n return idlTypeIncludesDictionary(def.idlType, defs);\n }\n return def.type === \"dictionary\";\n }\n for (const subtype of idlType.subtype) {\n if (idlTypeIncludesDictionary(subtype, defs)) {\n return true;\n }\n }\n return false;\n}\n","import { Base } from \"./base.js\";\n\nexport class Token extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n * @param {string} type\n */\n static parser(tokeniser, type) {\n return () => {\n const value = tokeniser.consume(type);\n if (value) {\n return new Token({ source: tokeniser.source, tokens: { value } });\n }\n };\n }\n\n get value() {\n return this.tokens.value.value;\n }\n}\n","import { Base } from \"./base.js\";\nimport { return_type, argument_list, unescape } from \"./helpers.js\";\n\nexport class Operation extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { special, regular } = {}) {\n const tokens = { special };\n const ret = new Operation({ source: tokeniser.source, tokens });\n if (special && special.value === \"stringifier\") {\n tokens.termination = tokeniser.consume(\";\");\n if (tokens.termination) {\n ret.arguments = [];\n return ret;\n }\n }\n if (!special && !regular) {\n tokens.special = tokeniser.consume(\"getter\", \"setter\", \"deleter\");\n }\n ret.idlType = return_type(tokeniser) || tokeniser.error(\"Missing return type\");\n tokens.name = tokeniser.consume(\"identifier\");\n tokens.open = tokeniser.consume(\"(\") || tokeniser.error(\"Invalid operation\");\n ret.arguments = argument_list(tokeniser);\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unterminated operation\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated operation, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"operation\";\n }\n get name() {\n const { name } = this.tokens;\n if (!name) {\n return \"\";\n }\n return unescape(name.value);\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n\n *validate(defs) {\n for (const argument of this.arguments) {\n yield* argument.validate(defs);\n }\n }\n}\n","import { Base } from \"./base.js\";\nimport { type_with_extended_attributes, unescape } from \"./helpers.js\";\n\nexport class Attribute extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { special, noInherit = false, readonly = false } = {}) {\n const start_position = tokeniser.position;\n const tokens = { special };\n const ret = new Attribute({ source: tokeniser.source, tokens });\n if (!special && !noInherit) {\n tokens.special = tokeniser.consume(\"inherit\");\n }\n if (ret.special === \"inherit\" && tokeniser.probe(\"readonly\")) {\n 
tokeniser.error(\"Inherited attributes cannot be read-only\");\n }\n tokens.readonly = tokeniser.consume(\"readonly\");\n if (readonly && !tokens.readonly && tokeniser.probe(\"attribute\")) {\n tokeniser.error(\"Attributes must be readonly in this context\");\n }\n tokens.base = tokeniser.consume(\"attribute\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n ret.idlType = type_with_extended_attributes(tokeniser, \"attribute-type\") || tokeniser.error(\"Attribute lacks a type\");\n switch (ret.idlType.generic) {\n case \"sequence\":\n case \"record\": tokeniser.error(`Attributes cannot accept ${ret.idlType.generic} types`);\n }\n tokens.name = tokeniser.consume(\"identifier\", \"required\") || tokeniser.error(\"Attribute lacks a name\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated attribute, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"attribute\";\n }\n get special() {\n if (!this.tokens.special) {\n return \"\";\n }\n return this.tokens.special.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { list, unescape } from \"./helpers.js\";\nimport { Token } from \"./token.js\";\nimport { Base } from \"./base.js\";\n\nclass EnumValue extends Token {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const value = tokeniser.consume(\"string\");\n if (value) {\n return new EnumValue({ source: tokeniser.source, tokens: { value } });\n }\n }\n\n get type() {\n return \"enum-value\";\n }\n get value() {\n return super.value.slice(1, -1);\n }\n}\n\nexport class Enum extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.base = tokeniser.consume(\"enum\");\n if (!tokens.base) {\n return;\n }\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"No name for enum\");\n const ret = tokeniser.current = new Enum({ source: tokeniser.source, tokens });\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(\"Bodyless enum\");\n ret.values = list(tokeniser, {\n parser: EnumValue.parse,\n allowDangler: true,\n listName: \"enumeration\"\n });\n if (tokeniser.probe(\"string\")) {\n tokeniser.error(\"No comma between enum values\");\n }\n tokens.close = tokeniser.consume(\"}\") || tokeniser.error(\"Unexpected value in enum\");\n if (!ret.values.length) {\n tokeniser.error(\"No value in enum\");\n }\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"No semicolon after enum\");\n return ret;\n }\n\n get type() {\n return \"enum\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape } from \"./helpers.js\";\n\nexport class Includes extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const target = tokeniser.consume(\"identifier\");\n if (!target) {\n return;\n }\n const tokens = { target };\n tokens.includes = tokeniser.consume(\"includes\");\n if (!tokens.includes) {\n tokeniser.unconsume(target.index);\n return;\n }\n tokens.mixin = tokeniser.consume(\"identifier\") || tokeniser.error(\"Incomplete includes statement\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"No terminating ; for includes statement\");\n return new Includes({ source: tokeniser.source, tokens });\n }\n\n get type() {\n return 
\"includes\";\n }\n get target() {\n return unescape(this.tokens.target.value);\n }\n get includes() {\n return unescape(this.tokens.mixin.value);\n }\n}\n","import { Base } from \"./base\";\nimport { type_with_extended_attributes, unescape } from \"./helpers\";\n\nexport class Typedef extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n const ret = new Typedef({ source: tokeniser.source, tokens });\n tokens.base = tokeniser.consume(\"typedef\");\n if (!tokens.base) {\n return;\n }\n ret.idlType = type_with_extended_attributes(tokeniser, \"typedef-type\") || tokeniser.error(\"Typedef lacks a type\");\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Typedef lacks a name\");\n tokeniser.current = ret;\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated typedef, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"typedef\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base\";\nimport { return_type, argument_list, unescape } from \"./helpers\";\n\nexport class CallbackFunction extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base) {\n const tokens = { base };\n const ret = new CallbackFunction({ source: tokeniser.source, tokens });\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Callback lacks a name\");\n tokeniser.current = ret;\n tokens.assign = tokeniser.consume(\"=\") || tokeniser.error(\"Callback lacks an assignment\");\n ret.idlType = return_type(tokeniser) || tokeniser.error(\"Callback lacks a return type\");\n tokens.open = tokeniser.consume(\"(\") || tokeniser.error(\"Callback lacks parentheses for arguments\");\n ret.arguments = argument_list(tokeniser);\n tokens.close = tokeniser.consume(\")\") || tokeniser.error(\"Unterminated callback\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated callback, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"callback\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n}\n","import { Base } from \"./base.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { unescape } from \"./helpers.js\";\n\n/**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\nfunction inheritance(tokeniser) {\n const colon = tokeniser.consume(\":\");\n if (!colon) {\n return {};\n }\n const inheritance = tokeniser.consume(\"identifier\") || tokeniser.error(\"Inheritance lacks a type\");\n return { colon, inheritance };\n}\n\nexport class Container extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n * @param {*} instance\n * @param {*} args\n */\n static parse(tokeniser, instance, { type, inheritable, allowedMembers }) {\n const { tokens } = instance;\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(`Missing name in ${instance.type}`);\n tokeniser.current = instance;\n if (inheritable) {\n Object.assign(tokens, inheritance(tokeniser));\n }\n tokens.open = tokeniser.consume(\"{\") || tokeniser.error(`Bodyless ${type}`);\n instance.members = [];\n while (true) {\n tokens.close = tokeniser.consume(\"}\");\n if (tokens.close) {\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(`Missing semicolon after ${type}`);\n return instance;\n }\n const ea = ExtendedAttributes.parse(tokeniser);\n let mem;\n for (const [parser, 
...args] of allowedMembers) {\n mem = parser(tokeniser, ...args);\n if (mem) {\n break;\n }\n }\n if (!mem) {\n tokeniser.error(\"Unknown member\");\n }\n mem.extAttrs = ea;\n instance.members.push(mem);\n }\n }\n\n get partial() {\n return !!this.tokens.partial;\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get inheritance() {\n if (!this.tokens.inheritance) {\n return null;\n }\n return unescape(this.tokens.inheritance.value);\n }\n\n *validate(defs) {\n for (const member of this.members) {\n if (member.validate) {\n yield* member.validate(defs);\n }\n }\n }\n }\n","import { Base } from \"./base.js\";\nimport { Type } from \"./type.js\";\nimport { const_data, const_value, primitive_type } from \"./helpers.js\";\n\nexport class Constant extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n tokens.base = tokeniser.consume(\"const\");\n if (!tokens.base) {\n return;\n }\n let idlType = primitive_type(tokeniser);\n if (!idlType) {\n const base = tokeniser.consume(\"identifier\") || tokeniser.error(\"Const lacks a type\");\n idlType = new Type({ source: tokeniser.source, tokens: { base } });\n }\n if (tokeniser.probe(\"?\")) {\n tokeniser.error(\"Unexpected nullable constant type\");\n }\n idlType.type = \"const-type\";\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Const lacks a name\");\n tokens.assign = tokeniser.consume(\"=\") || tokeniser.error(\"Const lacks value assignment\");\n tokens.value = const_value(tokeniser) || tokeniser.error(\"Const lacks a value\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated const, expected `;`\");\n const ret = new Constant({ source: tokeniser.source, tokens });\n ret.idlType = idlType;\n return ret;\n }\n\n get type() {\n return \"const\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get value() {\n return const_data(this.tokens.value);\n }\n}\n","import { Base } from \"./base\";\nimport { type_with_extended_attributes } from \"./helpers\";\n\nexport class IterableLike extends Base {\n /**\n * @param {import(\"../tokeniser.js\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const start_position = tokeniser.position;\n const tokens = {};\n const ret = new IterableLike({ source: tokeniser.source, tokens });\n tokens.readonly = tokeniser.consume(\"readonly\");\n tokens.base = tokens.readonly ?\n tokeniser.consume(\"maplike\", \"setlike\") :\n tokeniser.consume(\"iterable\", \"maplike\", \"setlike\");\n if (!tokens.base) {\n tokeniser.unconsume(start_position);\n return;\n }\n\n const { type } = ret;\n const secondTypeRequired = type === \"maplike\";\n const secondTypeAllowed = secondTypeRequired || type === \"iterable\";\n\n tokens.open = tokeniser.consume(\"<\") || tokeniser.error(`Missing less-than sign \\`<\\` in ${type} declaration`);\n const first = type_with_extended_attributes(tokeniser) || tokeniser.error(`Missing a type argument in ${type} declaration`);\n ret.idlType = [first];\n if (secondTypeAllowed) {\n first.tokens.separator = tokeniser.consume(\",\");\n if (first.tokens.separator) {\n ret.idlType.push(type_with_extended_attributes(tokeniser));\n }\n else if (secondTypeRequired)\n tokeniser.error(`Missing second type argument in ${type} declaration`);\n }\n tokens.close = tokeniser.consume(\">\") || tokeniser.error(`Missing greater-than sign \\`>\\` in ${type} declaration`);\n tokens.termination = tokeniser.consume(\";\") || 
tokeniser.error(`Missing semicolon after ${type} declaration`);\n\n return ret;\n }\n\n get type() {\n return this.tokens.base.value;\n }\n get readonly() {\n return !!this.tokens.readonly;\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\nimport { IterableLike } from \"./iterable.js\";\nimport { stringifier } from \"./helpers.js\";\nimport { validationError } from \"../error.js\";\nimport { checkInterfaceMemberDuplication } from \"../validators/interface.js\";\n\n/**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\nfunction static_member(tokeniser) {\n const special = tokeniser.consume(\"static\");\n if (!special) return;\n const member = Attribute.parse(tokeniser, { special }) ||\n Operation.parse(tokeniser, { special }) ||\n tokeniser.error(\"No body in static member\");\n return member;\n}\n\nexport class Interface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial = null } = {}) {\n const tokens = { partial, base };\n return Container.parse(tokeniser, new Interface({ source: tokeniser.source, tokens }), {\n type: \"interface\",\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [static_member],\n [stringifier],\n [IterableLike.parse],\n [Attribute.parse],\n [Operation.parse]\n ]\n });\n }\n\n get type() {\n return \"interface\";\n }\n\n *validate(defs) {\n if (!this.partial && this.extAttrs.every(extAttr => extAttr.name !== \"Exposed\")) {\n const message = `Interfaces must have \\`[Exposed]\\` extended attribute. \\\nTo fix, add, for example, \\`[Exposed=Window]\\`. Please also consider carefully \\\nif your interface should also be exposed in a Worker scope. 
Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(this.source, this.tokens.name, this, message);\n }\n yield* super.validate(defs);\n if (!this.partial) {\n yield* checkInterfaceMemberDuplication(defs, this);\n }\n }\n}\n","import { validationError } from \"../error.js\";\n\nexport function* checkInterfaceMemberDuplication(defs, i) {\n const opNames = new Set(getOperations(i).map(op => op.name));\n const partials = defs.partials.get(i.name) || [];\n const mixins = defs.mixinMap.get(i.name) || [];\n for (const ext of [...partials, ...mixins]) {\n const additions = getOperations(ext);\n yield* forEachExtension(additions, opNames, ext, i);\n for (const addition of additions) {\n opNames.add(addition.name);\n }\n }\n\n function* forEachExtension(additions, existings, ext, base) {\n for (const addition of additions) {\n const { name } = addition;\n if (name && existings.has(name)) {\n const message = `The operation \"${name}\" has already been defined for the base interface \"${base.name}\" either in itself or in a mixin`;\n yield validationError(ext.source, addition.tokens.name, ext, message);\n }\n }\n }\n\n function getOperations(i) {\n return i.members\n .filter(({type}) => type === \"operation\");\n }\n}\n","import { Container } from \"./container.js\";\nimport { Constant } from \"./constant.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { stringifier } from \"./helpers.js\";\n\nexport class Mixin extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, base, { partial } = {}) {\n const tokens = { partial, base };\n tokens.mixin = tokeniser.consume(\"mixin\");\n if (!tokens.mixin) {\n return;\n }\n return Container.parse(tokeniser, new Mixin({ source: tokeniser.source, tokens }), {\n type: \"interface mixin\",\n allowedMembers: [\n [Constant.parse],\n [stringifier],\n [Attribute.parse, { noInherit: true }],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"interface mixin\";\n }\n}\n","import { Base } from \"./base.js\";\nimport { unescape, type_with_extended_attributes } from \"./helpers.js\";\nimport { ExtendedAttributes } from \"./extended-attributes.js\";\nimport { Default } from \"./default.js\";\n\nexport class Field extends Base {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser) {\n const tokens = {};\n const ret = new Field({ source: tokeniser.source, tokens });\n ret.extAttrs = ExtendedAttributes.parse(tokeniser);\n tokens.required = tokeniser.consume(\"required\");\n ret.idlType = type_with_extended_attributes(tokeniser, \"dictionary-type\") || tokeniser.error(\"Dictionary member lacks a type\");\n tokens.name = tokeniser.consume(\"identifier\") || tokeniser.error(\"Dictionary member lacks a name\");\n ret.default = Default.parse(tokeniser);\n if (tokens.required && ret.default) tokeniser.error(\"Required member must not have a default\");\n tokens.termination = tokeniser.consume(\";\") || tokeniser.error(\"Unterminated dictionary member, expected `;`\");\n return ret;\n }\n\n get type() {\n return \"field\";\n }\n get name() {\n return unescape(this.tokens.name.value);\n }\n get required() {\n return !!this.tokens.required;\n }\n}\n","import { Container } from \"./container.js\";\nimport { Field } from \"./field.js\";\n\nexport class Dictionary extends Container {\n /**\n * @param 
{import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"dictionary\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new Dictionary({ source: tokeniser.source, tokens }), {\n type: \"dictionary\",\n inheritable: !partial,\n allowedMembers: [\n [Field.parse],\n ]\n });\n }\n\n get type() {\n return \"dictionary\";\n }\n}\n","import { Container } from \"./container.js\";\nimport { Attribute } from \"./attribute.js\";\nimport { Operation } from \"./operation.js\";\nimport { validationError } from \"../error.js\";\n\nexport class Namespace extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, { partial } = {}) {\n const tokens = { partial };\n tokens.base = tokeniser.consume(\"namespace\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new Namespace({ source: tokeniser.source, tokens }), {\n type: \"namespace\",\n allowedMembers: [\n [Attribute.parse, { noInherit: true, readonly: true }],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"namespace\";\n }\n\n *validate(defs) {\n if (!this.partial && this.extAttrs.every(extAttr => extAttr.name !== \"Exposed\")) {\n const message = `Namespaces must have [Exposed] extended attribute. \\\nTo fix, add, for example, [Exposed=Window]. Please also consider carefully \\\nif your namespace should also be exposed in a Worker scope. Refer to the \\\n[WebIDL spec section on Exposed](https://heycam.github.io/webidl/#Exposed) \\\nfor more information.`;\n yield validationError(this.source, this.tokens.name, this, message);\n }\n yield* super.validate(defs);\n }\n}\n","import { Container } from \"./container.js\";\nimport { Operation } from \"./operation.js\";\nimport { Constant } from \"./constant.js\";\n\n\nexport class CallbackInterface extends Container {\n /**\n * @param {import(\"../tokeniser\").Tokeniser} tokeniser\n */\n static parse(tokeniser, callback, { partial = null } = {}) {\n const tokens = { callback };\n tokens.base = tokeniser.consume(\"interface\");\n if (!tokens.base) {\n return;\n }\n return Container.parse(tokeniser, new CallbackInterface({ source: tokeniser.source, tokens }), {\n type: \"callback interface\",\n inheritable: !partial,\n allowedMembers: [\n [Constant.parse],\n [Operation.parse, { regular: true }]\n ]\n });\n }\n\n get type() {\n return \"callback interface\";\n }\n}\n","\"use strict\";\n\nimport { Tokeniser } from \"./tokeniser.js\";\nimport { Enum } from \"./productions/enum.js\";\nimport { Includes } from \"./productions/includes.js\";\nimport { ExtendedAttributes } from \"./productions/extended-attributes.js\";\nimport { Typedef } from \"./productions/typedef.js\";\nimport { CallbackFunction } from \"./productions/callback.js\";\nimport { Interface } from \"./productions/interface.js\";\nimport { Mixin } from \"./productions/mixin.js\";\nimport { Dictionary } from \"./productions/dictionary.js\";\nimport { Namespace } from \"./productions/namespace.js\";\nimport { CallbackInterface } from \"./productions/callback-interface.js\";\n\n/**\n * @param {Tokeniser} tokeniser\n * @param {object} options\n * @param {boolean} [options.concrete]\n */\nfunction parseByTokens(tokeniser, options) {\n const source = tokeniser.source;\n\n function error(str) {\n tokeniser.error(str);\n }\n\n function consume(...candidates) {\n return tokeniser.consume(...candidates);\n }\n\n function 
callback() {\n const callback = consume(\"callback\");\n if (!callback) return;\n if (tokeniser.probe(\"interface\")) {\n return CallbackInterface.parse(tokeniser, callback);\n }\n return CallbackFunction.parse(tokeniser, callback);\n }\n\n function interface_(opts) {\n const base = consume(\"interface\");\n if (!base) return;\n const ret = Mixin.parse(tokeniser, base, opts) ||\n Interface.parse(tokeniser, base, opts) ||\n error(\"Interface has no proper body\");\n return ret;\n }\n\n function partial() {\n const partial = consume(\"partial\");\n if (!partial) return;\n return Dictionary.parse(tokeniser, { partial }) ||\n interface_({ partial }) ||\n Namespace.parse(tokeniser, { partial }) ||\n error(\"Partial doesn't apply to anything\");\n }\n\n function definition() {\n return callback() ||\n interface_() ||\n partial() ||\n Dictionary.parse(tokeniser) ||\n Enum.parse(tokeniser) ||\n Typedef.parse(tokeniser) ||\n Includes.parse(tokeniser) ||\n Namespace.parse(tokeniser);\n }\n\n function definitions() {\n if (!source.length) return [];\n const defs = [];\n while (true) {\n const ea = ExtendedAttributes.parse(tokeniser);\n const def = definition();\n if (!def) {\n if (ea.length) error(\"Stray extended attributes\");\n break;\n }\n def.extAttrs = ea;\n defs.push(def);\n }\n const eof = consume(\"eof\");\n if (options.concrete) {\n defs.push(eof);\n }\n return defs;\n }\n const res = definitions();\n if (tokeniser.position < source.length) error(\"Unrecognised tokens\");\n return res;\n}\n\nexport function parse(str, options = {}) {\n const tokeniser = new Tokeniser(str);\n if (typeof options.sourceName !== \"undefined\") {\n tokeniser.source.name = options.sourceName;\n }\n return parseByTokens(tokeniser, options);\n}\n","\"use strict\";\n\nfunction noop(arg) {\n return arg;\n}\n\nconst templates = {\n wrap: items => items.join(\"\"),\n trivia: noop,\n name: noop,\n reference: noop,\n type: noop,\n generic: noop,\n inheritance: noop,\n definition: noop,\n extendedAttribute: noop,\n extendedAttributeReference: noop\n};\n\nexport function write(ast, { templates: ts = templates } = {}) {\n ts = Object.assign({}, templates, ts);\n\n function reference(raw, { unescaped, context }) {\n if (!unescaped) {\n unescaped = raw.startsWith(\"_\") ? raw.slice(1) : raw;\n }\n return ts.reference(raw, unescaped, context);\n }\n\n function token(t, wrapper = noop, ...args) {\n if (!t) {\n return \"\";\n }\n const value = wrapper(t.value, ...args);\n return ts.wrap([ts.trivia(t.trivia), value]);\n }\n\n function reference_token(t, context) {\n return token(t, reference, { context });\n }\n\n function name_token(t, arg) {\n return token(t, ts.name, arg);\n }\n\n function type_body(it) {\n if (it.union || it.generic) {\n return ts.wrap([\n token(it.tokens.base, ts.generic),\n token(it.tokens.open),\n ...it.subtype.map(type),\n token(it.tokens.close)\n ]);\n }\n const firstToken = it.tokens.prefix || it.tokens.base;\n const prefix = it.tokens.prefix ? 
[\n it.tokens.prefix.value,\n ts.trivia(it.tokens.base.trivia)\n ] : [];\n const ref = reference(ts.wrap([\n ...prefix,\n it.tokens.base.value,\n token(it.tokens.postfix)\n ]), { unescaped: it.idlType, context: it });\n return ts.wrap([ts.trivia(firstToken.trivia), ref]);\n }\n function type(it) {\n return ts.wrap([\n extended_attributes(it.extAttrs),\n type_body(it),\n token(it.tokens.nullable),\n token(it.tokens.separator)\n ]);\n }\n function default_(def) {\n if (!def) {\n return \"\";\n }\n return ts.wrap([\n token(def.tokens.assign),\n ...def.expression.map(t => token(t))\n ]);\n }\n function argument(arg) {\n return ts.wrap([\n extended_attributes(arg.extAttrs),\n token(arg.tokens.optional),\n ts.type(type(arg.idlType)),\n token(arg.tokens.variadic),\n name_token(arg.tokens.name, { data: arg }),\n default_(arg.default),\n token(arg.tokens.separator)\n ]);\n }\n function identifier(id, context) {\n return ts.wrap([\n reference_token(id.tokens.value, context),\n token(id.tokens.separator)\n ]);\n }\n function make_ext_at(it) {\n const { rhsType } = it.params;\n return ts.wrap([\n ts.trivia(it.tokens.name.trivia),\n ts.extendedAttribute(ts.wrap([\n ts.extendedAttributeReference(it.name),\n token(it.params.tokens.assign),\n reference_token(it.params.tokens.secondaryName, it),\n token(it.params.tokens.open),\n ...!it.params.list ? [] :\n it.params.list.map(\n rhsType === \"identifier-list\" ? id => identifier(id, it) : argument\n ),\n token(it.params.tokens.close)\n ])),\n token(it.tokens.separator)\n ]);\n }\n function extended_attributes(eats) {\n if (!eats.length) return \"\";\n return ts.wrap([\n token(eats.tokens.open),\n ...eats.map(make_ext_at),\n token(eats.tokens.close)\n ]);\n }\n\n function operation(it, parent) {\n const body = it.idlType ? 
[\n ts.type(type(it.idlType)),\n name_token(it.tokens.name, { data: it, parent }),\n token(it.tokens.open),\n ts.wrap(it.arguments.map(argument)),\n token(it.tokens.close),\n ] : [];\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.special),\n ...body,\n token(it.tokens.termination)\n ]), { data: it, parent });\n }\n\n function attribute(it, parent) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.special),\n token(it.tokens.readonly),\n token(it.tokens.base),\n ts.type(type(it.idlType)),\n name_token(it.tokens.name, { data: it, parent }),\n token(it.tokens.termination)\n ]), { data: it, parent });\n }\n\n function inheritance(inh) {\n if (!inh.tokens.inheritance) {\n return \"\";\n }\n return ts.wrap([\n token(inh.tokens.colon),\n ts.trivia(inh.tokens.inheritance.trivia),\n ts.inheritance(reference(inh.tokens.inheritance.value, { context: inh }))\n ]);\n }\n\n function container(it) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.callback),\n token(it.tokens.partial),\n token(it.tokens.base),\n token(it.tokens.mixin),\n name_token(it.tokens.name, { data: it }),\n inheritance(it),\n token(it.tokens.open),\n iterate(it.members, it),\n token(it.tokens.close),\n token(it.tokens.termination)\n ]), { data: it });\n }\n\n function field(it, parent) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.required),\n ts.type(type(it.idlType)),\n name_token(it.tokens.name, { data: it, parent }),\n default_(it.default),\n token(it.tokens.termination)\n ]), { data: it, parent });\n }\n function const_(it, parent) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.base),\n ts.type(type(it.idlType)),\n name_token(it.tokens.name, { data: it, parent }),\n token(it.tokens.assign),\n token(it.tokens.value),\n token(it.tokens.termination)\n ]), { data: it, parent });\n }\n function typedef(it) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.base),\n ts.type(type(it.idlType)),\n name_token(it.tokens.name, { data: it }),\n token(it.tokens.termination)\n ]), { data: it });\n }\n function includes(it) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n reference_token(it.tokens.target, it),\n token(it.tokens.includes),\n reference_token(it.tokens.mixin, it),\n token(it.tokens.termination)\n ]), { data: it });\n }\n function callback(it) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.base),\n name_token(it.tokens.name, { data: it }),\n token(it.tokens.assign),\n ts.type(type(it.idlType)),\n token(it.tokens.open),\n ...it.arguments.map(argument),\n token(it.tokens.close),\n token(it.tokens.termination),\n ]), { data: it });\n }\n function enum_(it) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.base),\n name_token(it.tokens.name, { data: it }),\n token(it.tokens.open),\n iterate(it.values, it),\n token(it.tokens.close),\n token(it.tokens.termination)\n ]), { data: it });\n }\n function enum_value(v, parent) {\n return ts.wrap([\n ts.trivia(v.tokens.value.trivia),\n ts.definition(\n ts.wrap(['\"', ts.name(v.value, { data: v, parent }), '\"']),\n { data: v, parent }\n ),\n token(v.tokens.separator)\n ]);\n }\n function iterable_like(it, parent) {\n return ts.definition(ts.wrap([\n extended_attributes(it.extAttrs),\n token(it.tokens.readonly),\n token(it.tokens.base, ts.generic),\n 
token(it.tokens.open),\n ts.wrap(it.idlType.map(type)),\n token(it.tokens.close),\n token(it.tokens.termination)\n ]), { data: it, parent });\n }\n function eof(it) {\n return ts.trivia(it.trivia);\n }\n\n const table = {\n interface: container,\n \"interface mixin\": container,\n namespace: container,\n operation,\n attribute,\n dictionary: container,\n field,\n const: const_,\n typedef,\n includes,\n callback,\n enum: enum_,\n \"enum-value\": enum_value,\n iterable: iterable_like,\n legacyiterable: iterable_like,\n maplike: iterable_like,\n setlike: iterable_like,\n \"callback interface\": container,\n eof\n };\n function dispatch(it, parent) {\n const dispatcher = table[it.type];\n if (!dispatcher) {\n throw new Error(`Type \"${it.type}\" is unsupported`);\n }\n return table[it.type](it, parent);\n }\n function iterate(things, parent) {\n if (!things) return;\n const results = things.map(thing => dispatch(thing, parent));\n return ts.wrap(results);\n }\n return iterate(ast);\n}\n","\"use strict\";\n\nimport { validationError as error } from \"./error.js\";\n\nfunction getMixinMap(all, unique) {\n const map = new Map();\n const includes = all.filter(def => def.type === \"includes\");\n for (const include of includes) {\n const mixin = unique.get(include.includes);\n if (!mixin) {\n continue;\n }\n const array = map.get(include.target);\n if (array) {\n array.push(mixin);\n } else {\n map.set(include.target, [mixin]);\n }\n }\n return map;\n}\n\nfunction groupDefinitions(all) {\n const unique = new Map();\n const duplicates = new Set();\n const partials = new Map();\n for (const def of all) {\n if (def.partial) {\n const array = partials.get(def.name);\n if (array) {\n array.push(def);\n } else {\n partials.set(def.name, [def]);\n }\n continue;\n }\n if (!def.name) {\n continue;\n }\n if (!unique.has(def.name)) {\n unique.set(def.name, def);\n } else {\n duplicates.add(def);\n }\n }\n return {\n all,\n unique,\n partials,\n duplicates,\n mixinMap: getMixinMap(all, unique)\n };\n}\n\nfunction* checkDuplicatedNames({ unique, duplicates }) {\n for (const dup of duplicates) {\n const { name } = dup;\n const message = `The name \"${name}\" of type \"${unique.get(name).type}\" was already seen`;\n yield error(dup.source, dup.tokens.name, dup, message);\n }\n}\n\nfunction* validateIterable(ast) {\n const defs = groupDefinitions(ast);\n for (const def of defs.all) {\n if (def.validate) {\n yield* def.validate(defs);\n }\n }\n yield* checkDuplicatedNames(defs);\n}\n\n// Remove this once all of our support targets expose `.flat()` by default\nfunction flatten(array) {\n if (array.flat) {\n return array.flat();\n }\n return [].concat(...array);\n}\n\n/**\n * @param {*} ast AST or array of ASTs\n */\nexport function validate(ast) {\n return [...validateIterable(flatten(ast))];\n}\n","export { parse } from \"./lib/webidl2.js\";\nexport { write } from \"./lib/writer.js\";\nexport { validate } from \"./lib/validator.js\";\n"],"sourceRoot":""} \ No newline at end of file diff --git a/test/invalid/baseline/operation-dict-default.txt b/test/invalid/baseline/argument-dict-default.txt similarity index 57% rename from test/invalid/baseline/operation-dict-default.txt rename to test/invalid/baseline/argument-dict-default.txt index a1b8f457..ff1caf32 100644 --- a/test/invalid/baseline/operation-dict-default.txt +++ b/test/invalid/baseline/argument-dict-default.txt @@ -1,9 +1,9 @@ -Validation error at line 13 in operation-dict-default.webidl, inside `argument dict`: +Validation error at line 13 in 
argument-dict-default.webidl, inside `argument dict`:
   void x(optional Dict dict);
                        ^ Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 15 in operation-dict-default.webidl, inside `argument union`:
+Validation error at line 15 in argument-dict-default.webidl, inside `argument union`:
                     (boolean or Dict) union);
                                       ^ Optional dictionary arguments must have a default value of `{}`.
-Validation error at line 17 in operation-dict-default.webidl, inside `argument union`:
+Validation error at line 17 in argument-dict-default.webidl, inside `argument union`:
   void z(optional Union union);
                         ^ Optional dictionary arguments must have a default value of `{}`.
diff --git a/test/invalid/idl/operation-dict-default.webidl b/test/invalid/idl/argument-dict-default.webidl
similarity index 100%
rename from test/invalid/idl/operation-dict-default.webidl
rename to test/invalid/idl/argument-dict-default.webidl
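
For reviewers skimming the series, a minimal sketch of the rule enforced by the `Argument.validate` code in the bundled source above: only *optional* dictionary-typed arguments must carry a `= {}` default. It uses the `parse` and `validate` exports visible in that source; the package import path and the `message` property on the returned errors are assumptions inferred from the baseline files, not confirmed by this patch:

    import { parse, validate } from "webidl2"; // import path is an assumption

    const ast = parse(`
      dictionary Dict { short x = 0; };
      [Exposed=Window]
      interface X {
        void f(Dict dict);               // non-optional: no default required
        void g(optional Dict dict);      // optional without a default: reported
        void h(optional Dict dict = {}); // optional with default: valid
      };
    `);

    for (const error of validate(ast)) {
      // Assumed error shape: each validation error carries a human-readable
      // message, as the baseline text files above suggest, e.g.
      // "Optional dictionary arguments must have a default value of `{}`."
      console.log(error.message);
    }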