From 68316a1dd5ccd7d6b61d044a18f9fe0b291d1f14 Mon Sep 17 00:00:00 2001 From: Rousan Ali Date: Fri, 20 Sep 2019 15:52:36 +0530 Subject: [PATCH 01/20] Add release checklist --- checklist.txt | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 checklist.txt diff --git a/checklist.txt b/checklist.txt new file mode 100644 index 0000000..d4db9f1 --- /dev/null +++ b/checklist.txt @@ -0,0 +1,19 @@ +Checklist to release a new build to NPM: + * Clone the repository if needed + * Checkout develop branch and make sure develop branch is up-to-date with remote + * If git flow is not initialized yet, Run: git flow init + * Create a new release branch, Run: git flow release start + * Delete all existing npm dependencies, Run: rm -rf node_modules package-lock.json + * Install dependencies again, Run: npm install + * Update package version with new release version in package.json + * Update LICENSE file if needed + * Update README.md file if needed + * Now, create a release-ready build, Run: npm run build + * Test the dist/* files if needed + * Now commit all the changes with this message: "Make a build and bump version" + * Then finish the release, Run: git flow finish release [-s] and enter release notes + * Push all changes and tags to remote, Run: git push origin master && git push origin develop && git push origin --tags + * Edit the title of the released tag in Github + * When everything is fine, it's ready to release + * Checkout master branch + * Now if everything is fine, release it to npm, Run: npm publish \ No newline at end of file From 738cf817214c83a6065fe3b8cc8a66423dac1202 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 25 Sep 2019 16:55:17 +0530 Subject: [PATCH 02/20] Implemented data converters as registery --- src/converter/dataConverterStore.js | 78 +++++++++++++++++++ .../defaultConverters/autoCoverter.js | 13 ++++ .../defaultConverters/dsvArrayConverter.js | 13 ++++ .../defaultConverters/dsvStringConverter.js | 13 ++++ 
.../defaultConverters/jsonConverter.js | 13 ++++ src/converter/index.js | 8 +- src/converter/model/dataConverter.js | 17 ++++ src/converter/{ => utils}/auto-resolver.js | 2 +- .../{ => utils}/auto-resolver.spec.js | 0 src/converter/{ => utils}/dsv-arr.js | 2 +- src/converter/{ => utils}/dsv-arr.spec.js | 0 src/converter/{ => utils}/dsv-str.js | 0 src/converter/{ => utils}/dsv-str.spec.js | 0 src/converter/{ => utils}/flat-json.js | 2 +- src/converter/{ => utils}/flat-json.spec.js | 0 src/datamodel.js | 7 ++ src/export.js | 4 +- src/helper.js | 9 ++- 18 files changed, 169 insertions(+), 12 deletions(-) create mode 100644 src/converter/dataConverterStore.js create mode 100644 src/converter/defaultConverters/autoCoverter.js create mode 100644 src/converter/defaultConverters/dsvArrayConverter.js create mode 100644 src/converter/defaultConverters/dsvStringConverter.js create mode 100644 src/converter/defaultConverters/jsonConverter.js create mode 100644 src/converter/model/dataConverter.js rename src/converter/{ => utils}/auto-resolver.js (93%) rename src/converter/{ => utils}/auto-resolver.spec.js (100%) rename src/converter/{ => utils}/dsv-arr.js (97%) rename src/converter/{ => utils}/dsv-arr.spec.js (100%) rename src/converter/{ => utils}/dsv-str.js (100%) rename src/converter/{ => utils}/dsv-str.spec.js (100%) rename src/converter/{ => utils}/flat-json.js (96%) rename src/converter/{ => utils}/flat-json.spec.js (100%) diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js new file mode 100644 index 0000000..567c44a --- /dev/null +++ b/src/converter/dataConverterStore.js @@ -0,0 +1,78 @@ +import DataConverter from './model/dataConverter' +import DSVStringConverter from './defaultConverters/dsvStringConverter'; +import JSONConverter from './defaultConverters/jsonConverter'; +import DSVArrayConverter from './defaultConverters/dsvArrayConverter'; +import AutoDataConverter from './defaultConverters/autoCoverter' + +class DataConverterStore 
{ + constructor(){ + this.store = new Map(); + this.converters(this._getDefaultConverters()); + } + + _getDefaultConverters(){ + return [ + new DSVStringConverter(), + new DSVArrayConverter(), + new JSONConverter(), + new AutoDataConverter() + ] + } + + /** + * + * @param {Array} converters : contains array of converter instance + * @return { Map } + */ + converters(converters){ + if(converters.length){ + converters.forEach(converter => this.store.set(converter.type,converter)); + } + return this.store; + } + + /** + * + * @param {DataConverter} converter : converter Instance + * @returns self + */ + register(converter){ + if(converter instanceof DataConverter){ + this.store.set(converter.type,converter) + } + return this; + } + + /** + * + * @param {DataConverter} converter : converter Instance + * @returns self + */ + + unregister(converter){ + this.store.delete(converter.type) + return this; + } + + get(name){ + if(this.store.has(name)){ + return this.store.get(name); + } + return null; + } + +} + +const converterStore = (function (){ + let store = null; + + const getStore = () => { + if (store === null) { + store = new DataConverterStore(); + } + return store; + } + return getStore(); +}); + +export default converterStore; \ No newline at end of file diff --git a/src/converter/defaultConverters/autoCoverter.js b/src/converter/defaultConverters/autoCoverter.js new file mode 100644 index 0000000..c93f05c --- /dev/null +++ b/src/converter/defaultConverters/autoCoverter.js @@ -0,0 +1,13 @@ +import DataConverter from "../model/dataConverter"; +import AUTO from '../utils/auto-resolver'; +import DataFormat from '../../enums/data-format' + +export default class AutoDataConverter extends DataConverter{ + constructor(){ + super(DataFormat.AUTO) + } + + convert(data , schema , options){ + return AUTO(data,schema,options); + } +} \ No newline at end of file diff --git a/src/converter/defaultConverters/dsvArrayConverter.js 
b/src/converter/defaultConverters/dsvArrayConverter.js new file mode 100644 index 0000000..9526fb2 --- /dev/null +++ b/src/converter/defaultConverters/dsvArrayConverter.js @@ -0,0 +1,13 @@ +import DataConverter from "../model/dataConverter"; +import DSVArr from '../utils/dsv-arr'; +import DataFormat from '../../enums/data-format' + +export default class DSVArrayConverter extends DataConverter{ + constructor(){ + super(DataFormat.DSV_ARR); + } + + convert(data , schema , options){ + return DSVArr(data,schema,options); + } +} \ No newline at end of file diff --git a/src/converter/defaultConverters/dsvStringConverter.js b/src/converter/defaultConverters/dsvStringConverter.js new file mode 100644 index 0000000..b504b8b --- /dev/null +++ b/src/converter/defaultConverters/dsvStringConverter.js @@ -0,0 +1,13 @@ +import DataConverter from "../model/dataConverter"; +import DSVStr from "../utils/dsv-str"; +import DataFormat from '../../enums/data-format' + +export default class DSVStringConverter extends DataConverter{ + constructor(){ + super(DataFormat.DSV_STR) + } + + convert(data , schema , options){ + return DSVStr(data,schema,options); + } +} \ No newline at end of file diff --git a/src/converter/defaultConverters/jsonConverter.js b/src/converter/defaultConverters/jsonConverter.js new file mode 100644 index 0000000..8fc9b90 --- /dev/null +++ b/src/converter/defaultConverters/jsonConverter.js @@ -0,0 +1,13 @@ +import DataConverter from "../model/dataConverter"; +import FlatJSON from '../utils/flat-json'; +import DataFormat from '../../enums/data-format' + +export default class JSONConverter extends DataConverter{ + constructor(){ + super(DataFormat.FLAT_JSON) + } + + convert(data , schema , options){ + return FlatJSON(data,schema,options); + } +} \ No newline at end of file diff --git a/src/converter/index.js b/src/converter/index.js index c5cc212..0d7f8fc 100644 --- a/src/converter/index.js +++ b/src/converter/index.js @@ -1,4 +1,4 @@ -export { default as DSVArr } from 
'./dsv-arr'; -export { default as DSVStr } from './dsv-str'; -export { default as FlatJSON } from './flat-json'; -export { default as Auto } from './auto-resolver'; +import converterStore from './dataConverterStore'; +import DataConverter from './model/dataConverter'; + +export { DataConverter, converterStore } ; diff --git a/src/converter/model/dataConverter.js b/src/converter/model/dataConverter.js new file mode 100644 index 0000000..fe055a2 --- /dev/null +++ b/src/converter/model/dataConverter.js @@ -0,0 +1,17 @@ +/** + * Interface for all data converters + */ +export default class DataConverter{ + constructor(type){ + this._type = type; + } + + get type(){ + return this._type; + } + + convert(data,schema,options){ + throw new Error("Convert method not implemented.") + } + +} \ No newline at end of file diff --git a/src/converter/auto-resolver.js b/src/converter/utils/auto-resolver.js similarity index 93% rename from src/converter/auto-resolver.js rename to src/converter/utils/auto-resolver.js index 2dc6ee2..7453858 100644 --- a/src/converter/auto-resolver.js +++ b/src/converter/utils/auto-resolver.js @@ -1,7 +1,7 @@ import FlatJSON from './flat-json'; import DSVArr from './dsv-arr'; import DSVStr from './dsv-str'; -import { detectDataFormat } from '../utils'; +import { detectDataFormat } from '../../utils'; /** * Parses the input data and detect the format automatically. 
diff --git a/src/converter/auto-resolver.spec.js b/src/converter/utils/auto-resolver.spec.js similarity index 100% rename from src/converter/auto-resolver.spec.js rename to src/converter/utils/auto-resolver.spec.js diff --git a/src/converter/dsv-arr.js b/src/converter/utils/dsv-arr.js similarity index 97% rename from src/converter/dsv-arr.js rename to src/converter/utils/dsv-arr.js index 9366fa5..130096a 100644 --- a/src/converter/dsv-arr.js +++ b/src/converter/utils/dsv-arr.js @@ -1,4 +1,4 @@ -import { columnMajor } from '../utils'; +import { columnMajor } from '../../utils'; /** * Parses and converts data formatted in DSV array to a manageable internal format. diff --git a/src/converter/dsv-arr.spec.js b/src/converter/utils/dsv-arr.spec.js similarity index 100% rename from src/converter/dsv-arr.spec.js rename to src/converter/utils/dsv-arr.spec.js diff --git a/src/converter/dsv-str.js b/src/converter/utils/dsv-str.js similarity index 100% rename from src/converter/dsv-str.js rename to src/converter/utils/dsv-str.js diff --git a/src/converter/dsv-str.spec.js b/src/converter/utils/dsv-str.spec.js similarity index 100% rename from src/converter/dsv-str.spec.js rename to src/converter/utils/dsv-str.spec.js diff --git a/src/converter/flat-json.js b/src/converter/utils/flat-json.js similarity index 96% rename from src/converter/flat-json.js rename to src/converter/utils/flat-json.js index 14f0bc8..c76ef9d 100644 --- a/src/converter/flat-json.js +++ b/src/converter/utils/flat-json.js @@ -1,4 +1,4 @@ -import { columnMajor } from '../utils'; +import { columnMajor } from '../../utils'; /** * Parses and converts data formatted in JSON to a manageable internal format. 
diff --git a/src/converter/flat-json.spec.js b/src/converter/utils/flat-json.spec.js similarity index 100% rename from src/converter/flat-json.spec.js rename to src/converter/utils/flat-json.spec.js diff --git a/src/datamodel.js b/src/datamodel.js index 31e986e..3a15792 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -25,6 +25,7 @@ import reducerStore from './utils/reducer-store'; import { createFields } from './field-creator'; import InvalidAwareTypes from './invalid-aware-types'; import Value from './value'; +import { converterStore } from './converter' /** * DataModel is an in-browser representation of tabular data. It supports @@ -94,6 +95,12 @@ class DataModel extends Relation { return reducerStore; } + /** + * Converters are functions that transforms data in various format tpo datamodel consumabe format. + */ + static get Converters(){ + return converterStore(); + } /** * Configure null, undefined, invalid values in the source data * diff --git a/src/export.js b/src/export.js index ed7d7c6..c656ac7 100644 --- a/src/export.js +++ b/src/export.js @@ -17,6 +17,7 @@ import { } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; +import { DataConverter } from './converter' import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; @@ -48,7 +49,8 @@ Object.assign(DataModel, { DataFormat, FilteringMode, InvalidAwareTypes, - version + version, + DataConverter }, enums); export default DataModel; diff --git a/src/helper.js b/src/helper.js index 2da2f26..42df390 100644 --- a/src/helper.js +++ b/src/helper.js @@ -7,7 +7,7 @@ import { import { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import defaultConfig from './default-config'; -import * as converter from './converter'; +import { converterStore } from './converter'; import { 
extend2, detectDataFormat } from './utils'; /** @@ -420,13 +420,14 @@ export const resolveFieldName = (schema, dataHeader) => { export const updateData = (relation, data, schema, options) => { schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); - const converterFn = converter[options.dataFormat]; + const converter = converterStore().get(options.dataFormat); + - if (!(converterFn && typeof converterFn === 'function')) { + if (!converter) { throw new Error(`No converter function found for ${options.dataFormat} format`); } - const [header, formattedData] = converterFn(data, schema, options); + const [header, formattedData] = converter.convert(data, schema, options); resolveFieldName(schema, header); const fieldArr = createFields(formattedData, schema, header); From 994362b87d3ec4f6b70f69658f04c7a852ff5f0a Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 25 Sep 2019 17:26:59 +0530 Subject: [PATCH 03/20] Implemented data converters as registery --- example/index.html | 2 +- example/samples/example2.js | 68 ++++++++++++++++++++++++----- src/converter/dataConverterStore.js | 6 +-- src/datamodel.js | 2 +- src/helper.js | 2 +- 5 files changed, 64 insertions(+), 16 deletions(-) diff --git a/example/index.html b/example/index.html index d2d791d..2b4d5ae 100644 --- a/example/index.html +++ b/example/index.html @@ -13,7 +13,7 @@ - + \ No newline at end of file diff --git a/example/samples/example2.js b/example/samples/example2.js index 0fd3bd2..fd84ae6 100644 --- a/example/samples/example2.js +++ b/example/samples/example2.js @@ -1,4 +1,59 @@ -const DataModel = window.DataModel.default; +// const DataModel = window.DataModel.default; +const columnMajor = (store) => { + let i = 0; + return (...fields) => { + fields.forEach((val, fieldIndex) => { + if (!(store[fieldIndex] instanceof Array)) { + store[fieldIndex] = Array.from({ length: i }); + } + store[fieldIndex].push(val); + }); + i++; + }; +}; + + +function 
FlatJSON222 (arr, schema) { + if (!Array.isArray(schema)) { + throw new Error('Schema missing or is in an unsupported format'); + } + + const header = {}; + let i = 0; + let insertionIndex; + const columns = []; + const push = columnMajor(columns); + const schemaFieldsName = schema.map(unitSchema => unitSchema.name); + + arr.forEach((item) => { + const fields = []; + schemaFieldsName.forEach((unitSchema) => { + if (unitSchema in header) { + insertionIndex = header[unitSchema]; + } else { + header[unitSchema] = i++; + insertionIndex = i - 1; + } + fields[insertionIndex] = item[unitSchema]; + }); + push(...fields); + }); + + return [Object.keys(header), columns]; +} + +class JSONConverter2 extends DataModel.DataConverter{ + constructor(){ + super("json2") + } + + convert(data , schema , options){ + console.log("this is json2") + return FlatJSON222(data,schema,options); + } +} + +DataModel.Converters.register(new JSONConverter2()); const schema = [ { @@ -50,13 +105,6 @@ const data = [ } ]; -const dm = new DataModel(data, schema); -const dataGenerated = dm.getData({ - order: 'column', - formatter: { - birthday: val => new Date(val), - name: val => `Name: ${val}` - } -}); +const dm = new DataModel(data, schema,{ dataFormat:"json2" }); -console.log(dataGenerated); +console.log(dm.getData()); diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js index 567c44a..1cde90a 100644 --- a/src/converter/dataConverterStore.js +++ b/src/converter/dataConverterStore.js @@ -63,16 +63,16 @@ class DataConverterStore { } -const converterStore = (function (){ +const converterStore = (function () { let store = null; - const getStore = () => { + function getStore () { if (store === null) { store = new DataConverterStore(); } return store; } return getStore(); -}); +}()); export default converterStore; \ No newline at end of file diff --git a/src/datamodel.js b/src/datamodel.js index 3a15792..9f4bbff 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ 
-99,7 +99,7 @@ class DataModel extends Relation { * Converters are functions that transforms data in various format tpo datamodel consumabe format. */ static get Converters(){ - return converterStore(); + return converterStore; } /** * Configure null, undefined, invalid values in the source data diff --git a/src/helper.js b/src/helper.js index 42df390..9acce88 100644 --- a/src/helper.js +++ b/src/helper.js @@ -420,7 +420,7 @@ export const resolveFieldName = (schema, dataHeader) => { export const updateData = (relation, data, schema, options) => { schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); - const converter = converterStore().get(options.dataFormat); + const converter = converterStore.get(options.dataFormat); if (!converter) { From e39c32710897b337d2b50806a7734d02b9e21a90 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 17:27:57 +0530 Subject: [PATCH 04/20] Added Field Registry --- src/export.js | 4 +++- src/fields/FieldRegistry.js | 36 +++++++++++++++++++++++++++++ src/fields/field/index.js | 46 +++++++++++++++++++++++++++++++++++++ src/fields/index.js | 1 + 4 files changed, 86 insertions(+), 1 deletion(-) create mode 100644 src/fields/FieldRegistry.js diff --git a/src/export.js b/src/export.js index c656ac7..a29dee7 100644 --- a/src/export.js +++ b/src/export.js @@ -22,6 +22,7 @@ import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; +import * as Fields from './fields'; const Operators = { compose, @@ -50,7 +51,8 @@ Object.assign(DataModel, { FilteringMode, InvalidAwareTypes, version, - DataConverter + DataConverter, + Fields }, enums); export default DataModel; diff --git a/src/fields/FieldRegistry.js b/src/fields/FieldRegistry.js new file mode 100644 index 0000000..ea0f761 --- /dev/null +++ b/src/fields/FieldRegistry.js @@ 
-0,0 +1,36 @@ +export { default as Field } from './field'; +export { default as Categorical } from './categorical'; +export { default as Temporal } from './temporal'; +export { default as Binned } from './binned'; +export { default as Continuous } from './continuous'; + +class FieldTypeRegistry{ + constructor(){ + this._measures = new Map(); + this._dimensions = new Map(); + } + + registerMeasure(subtype,measure){ + this._measures.set(subtype,measure); + return this; + } + + registerDimension(subtype,dimension){ + this._dimensions.set(subtype,dimension); + return this; + } +} + +export const fieldRegistry = (function () { + let store = null; + + function getStore () { + if (store === null) { + store = new FieldTypeRegistry(); + } + return store; + } + return getStore(); +}()); + + diff --git a/src/fields/field/index.js b/src/fields/field/index.js index 267afb0..5ba1709 100644 --- a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -1,4 +1,6 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; +import PartialField from '../partial-field' +import FieldParser from '../parsers/field-parser'; /** * In {@link DataModel}, every tabular data consists of column, a column is stored as field. 
@@ -125,4 +127,48 @@ export default class Field { formattedData () { throw new Error('Not yet implemented'); } + + static get BUILDER(){ + const builder = { + _params : {}, + _context : this, + fieldName : function(name) { + this._params.name = name; + return this; + }, + schema : function(schema){ + this._params.schema = schema; + return this; + }, + data : function(data){ + this._params.data = data; + return this; + }, + parser : function(parser){ + this._params.parser = parser; + return this; + }, + partialField : function(partialField){ + this._params.partialField = partialField + return this; + }, + rowDiffset : function(rowDiffset){ + this._params.rowDiffset = rowDiffset + return this; + }, + build : function(){ + let partialField = null; + if(this._params.partialField instanceof PartialField){ + partialField = this._params.partialField + }else if(this._params.schema && this.params.data && this.params.parser instanceof FieldParser){ + partialField = new PartialField(this._params.schema.name,this.params.data,this.params.schema,this.params.parser) + } + else { + throw new Error("Invalid Field parameters") + } + return new this._context(partialField,this._params.rowDiffset); + } + } + return builder; + } } diff --git a/src/fields/index.js b/src/fields/index.js index fbe76bf..002b246 100644 --- a/src/fields/index.js +++ b/src/fields/index.js @@ -11,3 +11,4 @@ export { default as TemporalParser } from './parsers/temporal-parser'; export { default as BinnedParser } from './parsers/binned-parser'; export { default as ContinuousParser } from './parsers/continuous-parser'; export { default as PartialField } from './partial-field'; +export { default as FieldRegistry } from './FieldRegistry' From 7f6c0fd04ac9013c760877c22fb1f5fbcd31f75e Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 17:30:13 +0530 Subject: [PATCH 05/20] Added Field Registry --- src/fields/{FieldRegistry.js => field-registry.js} | 4 +++- src/fields/index.js | 2 +- 2 files changed, 
4 insertions(+), 2 deletions(-) rename src/fields/{FieldRegistry.js => field-registry.js} (92%) diff --git a/src/fields/FieldRegistry.js b/src/fields/field-registry.js similarity index 92% rename from src/fields/FieldRegistry.js rename to src/fields/field-registry.js index ea0f761..1b2e51b 100644 --- a/src/fields/FieldRegistry.js +++ b/src/fields/field-registry.js @@ -21,7 +21,7 @@ class FieldTypeRegistry{ } } -export const fieldRegistry = (function () { +const fieldRegistry = (function () { let store = null; function getStore () { @@ -33,4 +33,6 @@ export const fieldRegistry = (function () { return getStore(); }()); +export default fieldRegistry; + diff --git a/src/fields/index.js b/src/fields/index.js index 002b246..9030c76 100644 --- a/src/fields/index.js +++ b/src/fields/index.js @@ -11,4 +11,4 @@ export { default as TemporalParser } from './parsers/temporal-parser'; export { default as BinnedParser } from './parsers/binned-parser'; export { default as ContinuousParser } from './parsers/continuous-parser'; export { default as PartialField } from './partial-field'; -export { default as FieldRegistry } from './FieldRegistry' +export { default as fieldRegistry } from './field-registry'; From 4b661958e22aa901d9cee67f986835ae3a861ec5 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 18:35:05 +0530 Subject: [PATCH 06/20] Added Individual field parser to individual fields : exit --- src/enums/dimension-subtype.js | 1 - src/fields/binned/index.js | 5 ++++ src/fields/categorical/index.js | 5 ++++ src/fields/continuous/index.js | 5 ++++ src/fields/field-registry.js | 21 +++++++++++---- src/fields/field/index.js | 4 +++ src/fields/parsers/temporal-parser/index.js | 15 ++++++----- .../parsers/temporal-parser/index.spec.js | 26 +++++++++---------- src/fields/partial-field/index.js | 2 +- src/fields/partial-field/index.spec.js | 4 +-- src/fields/temporal/index.js | 5 ++++ 11 files changed, 65 insertions(+), 28 deletions(-) diff --git 
a/src/enums/dimension-subtype.js b/src/enums/dimension-subtype.js index 70c4da3..9a9f17b 100644 --- a/src/enums/dimension-subtype.js +++ b/src/enums/dimension-subtype.js @@ -7,7 +7,6 @@ const DimensionSubtype = { CATEGORICAL: 'categorical', TEMPORAL: 'temporal', - GEO: 'geo', BINNED: 'binned' }; diff --git a/src/fields/binned/index.js b/src/fields/binned/index.js index 04d1fbf..5f34d37 100644 --- a/src/fields/binned/index.js +++ b/src/fields/binned/index.js @@ -1,4 +1,5 @@ import Dimension from '../dimension'; +import BinnedParser from '../parsers/binned-parser' /** * Represents binned field subtype. @@ -29,4 +30,8 @@ export default class Binned extends Dimension { bins () { return this.partialField.schema.bins; } + + static parser(){ + return new BinnedParser(); + } } diff --git a/src/fields/categorical/index.js b/src/fields/categorical/index.js index 267cbfc..48c0ea6 100644 --- a/src/fields/categorical/index.js +++ b/src/fields/categorical/index.js @@ -1,6 +1,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { DimensionSubtype } from '../../enums'; import Dimension from '../dimension'; +import CategoricalParser from '../parsers/categorical-parser' /** * Represents categorical field subtype. * @@ -41,4 +42,8 @@ export default class Categorical extends Dimension { }); return domain; } + + static parser(){ + return new CategoricalParser(); + } } diff --git a/src/fields/continuous/index.js b/src/fields/continuous/index.js index d0f68b3..3067a5e 100644 --- a/src/fields/continuous/index.js +++ b/src/fields/continuous/index.js @@ -2,6 +2,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { MeasureSubtype } from '../../enums'; import Measure from '../measure'; import InvalidAwareTypes from '../../invalid-aware-types'; +import ContinuousParser from '../parsers/continuous-parser' /** * Represents continuous field subtype. 
@@ -50,4 +51,8 @@ export default class Continuous extends Measure { return [min, max]; } + + static parser(){ + return new ContinuousParser(); + } } diff --git a/src/fields/field-registry.js b/src/fields/field-registry.js index 1b2e51b..43992d8 100644 --- a/src/fields/field-registry.js +++ b/src/fields/field-registry.js @@ -1,8 +1,9 @@ -export { default as Field } from './field'; -export { default as Categorical } from './categorical'; -export { default as Temporal } from './temporal'; -export { default as Binned } from './binned'; -export { default as Continuous } from './continuous'; +import Categorical from './categorical'; +import Temporal from './temporal'; +import Binned from './binned'; +import Continuous from './continuous'; +import { DimensionSubtype ,MeasureSubtype} from '../enums' + class FieldTypeRegistry{ constructor(){ @@ -21,12 +22,21 @@ class FieldTypeRegistry{ } } +const registerDefaultFields = (store) => { + store + .registerDimension(DimensionSubtype.CATEGORICAL,Categorical) + .registerDimension(DimensionSubtype.TEMPORAL,Temporal) + .registerDimension(DimensionSubtype.BINNED,Binned) + .registerMeasure(MeasureSubtype.CONTINUOUS,Continuous) +} + const fieldRegistry = (function () { let store = null; function getStore () { if (store === null) { store = new FieldTypeRegistry(); + registerDefaultFields(store); } return store; } @@ -36,3 +46,4 @@ const fieldRegistry = (function () { export default fieldRegistry; + diff --git a/src/fields/field/index.js b/src/fields/field/index.js index 5ba1709..7d178a9 100644 --- a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -34,6 +34,10 @@ export default class Field { this.rowDiffset = rowDiffset; } + static parser(){ + throw new Error("Not yet implemented") + } + /** * Generates the field type specific domain. 
* diff --git a/src/fields/parsers/temporal-parser/index.js b/src/fields/parsers/temporal-parser/index.js index 0c02894..a5622a8 100644 --- a/src/fields/parsers/temporal-parser/index.js +++ b/src/fields/parsers/temporal-parser/index.js @@ -16,11 +16,11 @@ export default class TemporalParser extends FieldParser { * @public * @param {Object} schema - The schema object for the corresponding field. */ - constructor (schema) { - super(); - this.schema = schema; - this._dtf = new DateTimeFormatter(this.schema.format); - } + // constructor (schema) { + // super(); + // this.schema = schema; + // this._dtf = new DateTimeFormatter(format); + // } /** * Parses a single value of a field and returns the millisecond value. @@ -29,9 +29,12 @@ export default class TemporalParser extends FieldParser { * @param {string|number} val - The value of the field. * @return {number} Returns the millisecond value. */ - parse (val) { + parse (val, { format } ) { let result; // check if invalid date value + if(!this._dtf){ + this._dtf = new DateTimeFormatter(format); + } if (!InvalidAwareTypes.isInvalid(val)) { let nativeDate = this._dtf.getNativeDate(val); result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA; diff --git a/src/fields/parsers/temporal-parser/index.spec.js b/src/fields/parsers/temporal-parser/index.spec.js index 5805cf6..73d4aa5 100644 --- a/src/fields/parsers/temporal-parser/index.spec.js +++ b/src/fields/parsers/temporal-parser/index.spec.js @@ -17,37 +17,37 @@ describe('TemporalParser', () => { let temParser; beforeEach(() => { - temParser = new TemporalParser(schema); + temParser = new TemporalParser(); }); describe('#parse', () => { it('should return milliseconds for the formatted value', () => { const dateStr = '2017-03-01'; const expectedTs = new DateTimeFormatter(schema.format).getNativeDate(dateStr).getTime(); - expect(temParser.parse(dateStr)).to.equal(expectedTs); + expect(temParser.parse(dateStr,{ format:schema.format })).to.equal(expectedTs); }); it('should bypass to Date API when format is not present', () => { const val = 1540629018697; - temParser = new TemporalParser(Object.assign({}, schema, { format: undefined })); - expect(temParser.parse(val)).to.equal(+new Date(val)); + temParser = new TemporalParser(); + expect(temParser.parse(val,{ format: undefined })).to.equal(+new Date(val)); }); it('should return default invalid type for invalid value', () => { - expect(temParser.parse(null)).to.eql(DataModel.InvalidAwareTypes.NULL); - expect(temParser.parse(undefined)).to.equal(DataModel.InvalidAwareTypes.NA); - expect(temParser.parse('abcd')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse(null,{ format:schema.format })).to.eql(DataModel.InvalidAwareTypes.NULL); + expect(temParser.parse(undefined,{ format:schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd',{ format:schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); }); it('should return valid date for edge case', () => { - expect(temParser.parse('')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('',{ format:schema.format 
})).to.equal(DataModel.InvalidAwareTypes.NA); - temParser = new TemporalParser(Object.assign({}, schema, { format: '%Y' })); - expect(temParser.parse('1998')).to.equal(new Date(1998, 0, 1).getTime()); + temParser = new TemporalParser(); + expect(temParser.parse('1998',{ format: '%Y' })).to.equal(new Date(1998, 0, 1).getTime()); - temParser = new TemporalParser(Object.assign({}, schema, { format: '%y' })); - expect(temParser.parse('98')).to.equal(new Date(1998, 0, 1).getTime()); + temParser = new TemporalParser(); + expect(temParser.parse('98',{ format: '%y' })).to.equal(new Date(1998, 0, 1).getTime()); - expect(temParser.parse('abcd')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd',{ format: '%y' })).to.equal(DataModel.InvalidAwareTypes.NA); }); }); }); diff --git a/src/fields/partial-field/index.js b/src/fields/partial-field/index.js index 2795f92..299ba7e 100644 --- a/src/fields/partial-field/index.js +++ b/src/fields/partial-field/index.js @@ -31,6 +31,6 @@ export default class PartialField { * @return {Array} Returns the sanitized data. 
*/ _sanitize (data) { - return data.map(datum => this.parser.parse(datum)); + return data.map(datum => this.parser.parse(datum, { format : this.schema.format })); } } diff --git a/src/fields/partial-field/index.spec.js b/src/fields/partial-field/index.spec.js index 7f286d7..2a29a7f 100644 --- a/src/fields/partial-field/index.spec.js +++ b/src/fields/partial-field/index.spec.js @@ -19,7 +19,7 @@ describe('PartialField', () => { let temParser; beforeEach(() => { - temParser = new TemporalParser(schema); + temParser = new TemporalParser(); partField = new PartialField(schema.name, data, schema, temParser); }); @@ -31,7 +31,7 @@ describe('PartialField', () => { }); it('should sanitize the input data before use', () => { - const expected = data.map(d => temParser.parse(d)); + const expected = data.map(d => temParser.parse(d,{ format:schema.format })); expect(partField.data).to.eql(expected); }); }); diff --git a/src/fields/temporal/index.js b/src/fields/temporal/index.js index 0b3c540..eeac4af 100644 --- a/src/fields/temporal/index.js +++ b/src/fields/temporal/index.js @@ -2,6 +2,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import Dimension from '../dimension'; import { DateTimeFormatter } from '../../utils'; import InvalidAwareTypes from '../../invalid-aware-types'; +import TemporalParser from '../parsers/temporal-parser' /** * Represents temporal field subtype. 
@@ -121,5 +122,9 @@ export default class Temporal extends Dimension { }); return data; } + + static parser(){ + return new TemporalParser(); + } } From 2727a7c0ad655bd61207a04fb027861dd22b671f Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 18:35:50 +0530 Subject: [PATCH 07/20] Added Individual field parser to individual fields : exit --- src/fields/partial-field/index.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fields/partial-field/index.spec.js b/src/fields/partial-field/index.spec.js index 2a29a7f..290e08c 100644 --- a/src/fields/partial-field/index.spec.js +++ b/src/fields/partial-field/index.spec.js @@ -31,7 +31,7 @@ describe('PartialField', () => { }); it('should sanitize the input data before use', () => { - const expected = data.map(d => temParser.parse(d,{ format:schema.format })); + const expected = data.map(d => temParser.parse(d,{ format : schema.format })); expect(partField.data).to.eql(expected); }); }); From 3f02fa378fc1650f7da6834dc365479fb6cf5c9e Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 18:39:24 +0530 Subject: [PATCH 08/20] Added Field Registry --- src/fields/field/index.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/fields/field/index.js b/src/fields/field/index.js index 7d178a9..a9db81a 100644 --- a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -148,10 +148,6 @@ export default class Field { this._params.data = data; return this; }, - parser : function(parser){ - this._params.parser = parser; - return this; - }, partialField : function(partialField){ this._params.partialField = partialField return this; @@ -165,7 +161,7 @@ export default class Field { if(this._params.partialField instanceof PartialField){ partialField = this._params.partialField }else if(this._params.schema && this.params.data && this.params.parser instanceof FieldParser){ - partialField = new 
PartialField(this._params.schema.name,this.params.data,this.params.schema,this.params.parser) + partialField = new PartialField(this._params.schema.name, this.params.data, this.params.schema, this._context.parser()) } else { throw new Error("Invalid Field parameters") From eed100a8c101d20a8433bb989f0a902ccbfabfc7 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 20:04:34 +0530 Subject: [PATCH 09/20] Field Registry complete --- src/create-fields.spec.js | 11 ++- src/datamodel.js | 9 +++ src/export.js | 4 +- src/field-creator.js | 153 ++++++++++++++++++++++++++--------- src/fields/field-registry.js | 24 +++--- src/fields/field/index.js | 4 +- src/fields/index.js | 10 --- 7 files changed, 148 insertions(+), 67 deletions(-) diff --git a/src/create-fields.spec.js b/src/create-fields.spec.js index b0ea799..ac59fdd 100644 --- a/src/create-fields.spec.js +++ b/src/create-fields.spec.js @@ -2,16 +2,19 @@ /* eslint-disable no-unused-expressions */ import { expect } from 'chai'; -import { Categorical, Temporal, Binned, Continuous } from './fields'; +import Categorical from './fields/categorical'; +import Temporal from './fields/temporal'; +import Binned from './fields/binned'; +import Continuous from './fields/continuous'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import { MeasureSubtype, DimensionSubtype } from './enums'; +import PartialField from './fields/partial-field' describe('Creating Field', () => { describe('#createUnitFieldFromPartial', () => { it('should return an array of correct field instances', () => { - let mockedPartialField = { - schema: { name: 'Country' } - }; + let mockedPartialField = new PartialField('Country',[],{ name: 'Country' }, null); + // mockedPartialField.schema = { name: 'Country' } let mockedRowDiffset = '1-2'; expect(createUnitFieldFromPartial(mockedPartialField, mockedRowDiffset) instanceof Categorical).to.be.true; diff --git a/src/datamodel.js b/src/datamodel.js index 9f4bbff..5768f51 
100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -26,6 +26,7 @@ import { createFields } from './field-creator'; import InvalidAwareTypes from './invalid-aware-types'; import Value from './value'; import { converterStore } from './converter' +import { fieldRegistry } from './fields' /** * DataModel is an in-browser representation of tabular data. It supports @@ -101,6 +102,14 @@ class DataModel extends Relation { static get Converters(){ return converterStore; } + + /** + * Register new type of fields + */ + static get FieldTypes(){ + return fieldRegistry; + } + /** * Configure null, undefined, invalid values in the source data * diff --git a/src/export.js b/src/export.js index a29dee7..b9ca731 100644 --- a/src/export.js +++ b/src/export.js @@ -22,7 +22,7 @@ import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; -import * as Fields from './fields'; +import * as FieldsUtility from './fields'; const Operators = { compose, @@ -52,7 +52,7 @@ Object.assign(DataModel, { InvalidAwareTypes, version, DataConverter, - Fields + FieldsUtility }, enums); export default DataModel; diff --git a/src/field-creator.js b/src/field-creator.js index 6d69eac..97760a2 100644 --- a/src/field-creator.js +++ b/src/field-creator.js @@ -11,6 +11,8 @@ import { PartialField } from './fields'; +import { fieldRegistry } from './fields' + /** * Creates a field instance according to the provided data and schema. * @@ -18,38 +20,94 @@ import { * @param {Object} schema - The field schema object. * @return {Field} Returns the newly created field instance. 
*/ +// function createUnitField(data, schema) { +// data = data || []; +// let partialField; + +// switch (schema.type) { +// case FieldType.MEASURE: +// switch (schema.subtype) { +// case MeasureSubtype.CONTINUOUS: +// partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); +// return new Continuous(partialField, `0-${data.length - 1}`); +// default: +// partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); +// return new Continuous(partialField, `0-${data.length - 1}`); +// } +// case FieldType.DIMENSION: +// switch (schema.subtype) { +// case DimensionSubtype.CATEGORICAL: +// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); +// return new Categorical(partialField, `0-${data.length - 1}`); +// case DimensionSubtype.TEMPORAL: +// partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema)); +// return new Temporal(partialField, `0-${data.length - 1}`); +// case DimensionSubtype.BINNED: +// partialField = new PartialField(schema.name, data, schema, new BinnedParser()); +// return new Binned(partialField, `0-${data.length - 1}`); +// default: +// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); +// return new Categorical(partialField, `0-${data.length - 1}`); +// } +// default: +// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); +// return new Categorical(partialField, `0-${data.length - 1}`); +// } +// } + function createUnitField(data, schema) { data = data || []; - let partialField; switch (schema.type) { case FieldType.MEASURE: - switch (schema.subtype) { - case MeasureSubtype.CONTINUOUS: - partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); - return new Continuous(partialField, `0-${data.length - 1}`); - default: - partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); - return new Continuous(partialField, 
`0-${data.length - 1}`); + if(fieldRegistry.has(schema.subtype)){ + let field = fieldRegistry.get(schema.subtype) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() + return field; + } + else { + let field = fieldRegistry.get(MeasureSubtype.CONTINUOUS) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() + return field; } case FieldType.DIMENSION: - switch (schema.subtype) { - case DimensionSubtype.CATEGORICAL: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); - case DimensionSubtype.TEMPORAL: - partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema)); - return new Temporal(partialField, `0-${data.length - 1}`); - case DimensionSubtype.BINNED: - partialField = new PartialField(schema.name, data, schema, new BinnedParser()); - return new Binned(partialField, `0-${data.length - 1}`); - default: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); + if(fieldRegistry.has(schema.subtype)){ + let field = fieldRegistry.get(schema.subtype) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() + return field; + }else { + let field = fieldRegistry.get(DimensionSubtype.CATEGORICAL) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() + return field; } default: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); + return fieldRegistry.get(DimensionSubtype.CATEGORICAL) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() } } @@ -66,25 
+124,44 @@ export function createUnitFieldFromPartial(partialField, rowDiffset) { switch (schema.type) { case FieldType.MEASURE: - switch (schema.subtype) { - case MeasureSubtype.CONTINUOUS: - return new Continuous(partialField, rowDiffset); - default: - return new Continuous(partialField, rowDiffset); + if(fieldRegistry.has(schema.subtype)){ + let field = fieldRegistry.get(schema.subtype) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() + return field; + } + else { + let field = fieldRegistry.get(MeasureSubtype.CONTINUOUS) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() + return field; } case FieldType.DIMENSION: - switch (schema.subtype) { - case DimensionSubtype.CATEGORICAL: - return new Categorical(partialField, rowDiffset); - case DimensionSubtype.TEMPORAL: - return new Temporal(partialField, rowDiffset); - case DimensionSubtype.BINNED: - return new Binned(partialField, rowDiffset); - default: - return new Categorical(partialField, rowDiffset); + if(fieldRegistry.has(schema.subtype)){ + let field = fieldRegistry.get(schema.subtype) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() + return field; + }else { + let field = fieldRegistry.get(DimensionSubtype.CATEGORICAL) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() + return field; } default: - return new Categorical(partialField, rowDiffset); + return fieldRegistry.get(DimensionSubtype.CATEGORICAL) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() } } diff --git a/src/fields/field-registry.js b/src/fields/field-registry.js index 43992d8..55a799f 100644 --- a/src/fields/field-registry.js +++ b/src/fields/field-registry.js @@ -7,27 +7,29 @@ import { DimensionSubtype ,MeasureSubtype} from '../enums' class FieldTypeRegistry{ constructor(){ - this._measures = new Map(); - this._dimensions = new Map(); + this._fieldType = new Map(); } - registerMeasure(subtype,measure){ - 
this._measures.set(subtype,measure); + registerFieldType(subtype,dimension){ + this._fieldType.set(subtype,dimension); return this; } - registerDimension(subtype,dimension){ - this._dimensions.set(subtype,dimension); - return this; + has(type){ + return this._fieldType.has(type); + } + + get(type){ + return this._fieldType.get(type); } } const registerDefaultFields = (store) => { store - .registerDimension(DimensionSubtype.CATEGORICAL,Categorical) - .registerDimension(DimensionSubtype.TEMPORAL,Temporal) - .registerDimension(DimensionSubtype.BINNED,Binned) - .registerMeasure(MeasureSubtype.CONTINUOUS,Continuous) + .registerFieldType(DimensionSubtype.CATEGORICAL,Categorical) + .registerFieldType(DimensionSubtype.TEMPORAL,Temporal) + .registerFieldType(DimensionSubtype.BINNED,Binned) + .registerFieldType(MeasureSubtype.CONTINUOUS,Continuous) } const fieldRegistry = (function () { diff --git a/src/fields/field/index.js b/src/fields/field/index.js index a9db81a..24b64dd 100644 --- a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -160,8 +160,8 @@ export default class Field { let partialField = null; if(this._params.partialField instanceof PartialField){ partialField = this._params.partialField - }else if(this._params.schema && this.params.data && this.params.parser instanceof FieldParser){ - partialField = new PartialField(this._params.schema.name, this.params.data, this.params.schema, this._context.parser()) + }else if(this._params.schema && this._params.data ){ + partialField = new PartialField(this._params.name, this._params.data, this._params.schema, this._context.parser()) } else { throw new Error("Invalid Field parameters") diff --git a/src/fields/index.js b/src/fields/index.js index 9030c76..71510c6 100644 --- a/src/fields/index.js +++ b/src/fields/index.js @@ -1,14 +1,4 @@ -export { default as Field } from './field'; export { default as Dimension } from './dimension'; -export { default as Categorical } from './categorical'; -export { default as 
Temporal } from './temporal'; -export { default as Binned } from './binned'; export { default as Measure } from './measure'; -export { default as Continuous } from './continuous'; export { default as FieldParser } from './parsers/field-parser'; -export { default as CategoricalParser } from './parsers/categorical-parser'; -export { default as TemporalParser } from './parsers/temporal-parser'; -export { default as BinnedParser } from './parsers/binned-parser'; -export { default as ContinuousParser } from './parsers/continuous-parser'; -export { default as PartialField } from './partial-field'; export { default as fieldRegistry } from './field-registry'; From 12fab1fb00cb06901844b5b55c106f016695935b Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Wed, 9 Oct 2019 20:11:31 +0530 Subject: [PATCH 10/20] Field Registry complete-refcator --- src/field-creator.js | 100 +++++++++---------------------------------- 1 file changed, 20 insertions(+), 80 deletions(-) diff --git a/src/field-creator.js b/src/field-creator.js index 97760a2..d30765e 100644 --- a/src/field-creator.js +++ b/src/field-creator.js @@ -58,56 +58,23 @@ import { fieldRegistry } from './fields' function createUnitField(data, schema) { data = data || []; - switch (schema.type) { - case FieldType.MEASURE: - if(fieldRegistry.has(schema.subtype)){ - let field = fieldRegistry.get(schema.subtype) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - return field; - } - else { - let field = fieldRegistry.get(MeasureSubtype.CONTINUOUS) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - return field; - } - case FieldType.DIMENSION: - if(fieldRegistry.has(schema.subtype)){ - let field = fieldRegistry.get(schema.subtype) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - return field; - }else { - let field = 
fieldRegistry.get(DimensionSubtype.CATEGORICAL) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - return field; - } - default: - return fieldRegistry.get(DimensionSubtype.CATEGORICAL) + if(fieldRegistry.has(schema.subtype)){ + return fieldRegistry.get(schema.subtype) .BUILDER .fieldName(schema.name) .schema(schema) .data(data) .rowDiffset(`0-${data.length - 1}`) .build() + } else { + return fieldRegistry.get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build() + } } @@ -122,46 +89,19 @@ function createUnitField(data, schema) { export function createUnitFieldFromPartial(partialField, rowDiffset) { const { schema } = partialField; - switch (schema.type) { - case FieldType.MEASURE: - if(fieldRegistry.has(schema.subtype)){ - let field = fieldRegistry.get(schema.subtype) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - return field; - } - else { - let field = fieldRegistry.get(MeasureSubtype.CONTINUOUS) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - return field; - } - case FieldType.DIMENSION: - if(fieldRegistry.has(schema.subtype)){ - let field = fieldRegistry.get(schema.subtype) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - return field; - }else { - let field = fieldRegistry.get(DimensionSubtype.CATEGORICAL) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - return field; - } - default: - return fieldRegistry.get(DimensionSubtype.CATEGORICAL) + if(fieldRegistry.has(schema.subtype)){ + return fieldRegistry.get(schema.subtype) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build() + } else { + return fieldRegistry.get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) .BUILDER .partialField(partialField) .rowDiffset(rowDiffset) .build() + } } From 4d082308da195f5d8cd193d24dc64296d6e7a7b4 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Thu, 10 Oct 2019 16:16:08 +0530 Subject: [PATCH 11/20] Eslint fixes --- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- src/converter/dataConverterStore.js | 40 +++---- .../defaultConverters/autoCoverter.js | 16 +-- .../defaultConverters/dsvArrayConverter.js | 14 +-- .../defaultConverters/dsvStringConverter.js | 18 +-- .../defaultConverters/jsonConverter.js | 16 +-- src/converter/index.js | 2 +- src/converter/model/dataConverter.js | 12 +- src/create-fields.spec.js | 12 +- src/datamodel.js | 8 +- src/export.js | 2 +- src/field-creator.js | 103 +++++------------- src/fields/binned/index.js | 4 +- src/fields/categorical/index.js | 4 +- src/fields/continuous/index.js | 6 +- src/fields/field-registry.js | 38 +++---- src/fields/field-registry.spec.js | 0 src/fields/field/index.js | 46 ++++---- src/fields/parsers/temporal-parser/index.js | 4 +- .../parsers/temporal-parser/index.spec.js | 18 +-- src/fields/partial-field/index.js | 2 +- src/fields/partial-field/index.spec.js | 2 +- src/fields/temporal/index.js | 6 +- src/helper.js | 4 +- 25 files changed, 166 insertions(+), 215 deletions(-) create mode 100644 src/fields/field-registry.spec.js diff --git a/dist/datamodel.js b/dist/datamodel.js index 1079edd..a53fee1 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return 
n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports=JSON.parse('{"name":"datamodel","description":"Relational algebra compliant in-memory tabular data store","homepage":"https://github.com/chartshq/datamodel","version":"2.2.1","license":"MIT","main":"dist/datamodel.js","keywords":["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],"author":"Muzejs.org (https://muzejs.org/)","repository":{"type":"git","url":"https://github.com/chartshq/datamodel.git"},"contributors":[{"name":"Akash Goswami","email":"akashgoswami90s@gmail.com"},{"name":"Subhash Haldar"},{"name":"Rousan Ali","email":"rousanali786@gmail.com","url":"https://rousan.io"},{"name":"Ujjal Kumar 
Dutta","email":"duttaujjalkumar@live.com"}],"dependencies":{"d3-dsv":"^1.0.8"},"devDependencies":{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0","chai":"3.5.0","cross-env":"^5.0.5","eslint":"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0","jsdoc":"3.5.5","json2yaml":"^1.1.0","karma":"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3","marked":"^0.5.0","mocha":"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0","webpack":"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},"scripts":{"test":"npm run lint && npm run ut","ut":"karma start karma.conf.js","utd":"karma start --single-run false --browsers Chrome karma.conf.js ","build":"npm run build:prod","build:dev":"webpack --mode development","build:prod":"webpack --mode production","start":"webpack-dev-server --config webpack.config.dev.js --mode development --open","lint":"eslint ./src","lint-errors":"eslint --quiet ./src","docs":"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}')},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",function(){return o}),n.d(r,"DimensionSubtype",function(){return u}),n.d(r,"MeasureSubtype",function(){return c}),n.d(r,"FieldType",function(){return f}),n.d(r,"FilteringMode",function(){return l}),n.d(r,"GROUP_BY_FUNCTIONS",function(){return s});var a={};n.r(a),n.d(a,"DSVArr",function(){return rt}),n.d(a,"DSVStr",function(){return mt}),n.d(a,"FlatJSON",function(){return yt}),n.d(a,"Auto",function(){return bt});var i={};n.r(i),n.d(i,"sum",function(){return 
un}),n.d(i,"avg",function(){return cn}),n.d(i,"min",function(){return fn}),n.d(i,"max",function(){return ln}),n.d(i,"first",function(){return sn}),n.d(i,"last",function(){return dn}),n.d(i,"count",function(){return pn}),n.d(i,"sd",function(){return hn});var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return 
a},h.formatAs=function(e,t){var n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach(function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)})}var R=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,i=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=B(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach(function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)}),f.fields.forEach(function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)}),T(e._rowDiffset,function(n){var d=!1,h=void 0;T(t._rowDiffset,function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach(function(e){m.push(e.partialField.data[n]),y[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}}),f.fields.forEach(function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}});var g=At(y[l]),b=At(y[s]);if(u(g,b,function(){return e.detachedRoot()},function(){return t.detachedRoot()},{})){var w={};m.forEach(function(e,t){w[i[t].name]=e}),d&&L.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===L.LEFTOUTER||a===L.RIGHTOUTER)&&!d){var O={},_=c.fields.length-1;m.forEach(function(e,t){O[i[t].name]=t<=_?e:null}),d=!0,h=n,o.push(O)}})}),new on(o,i,{name:d})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach(function(e){var a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))}),r}function Z(e,t,n){var r={label:e[0]};return t.reduce(function(t,r,a){return t[r]=e[1].map(function(e){return e[n[a].index]}),t},r),r}function $(e,t,n){for(var r=void 0,a=void 0,i=void 0,o=n.length-1;o>=0;o--)r=n[o][0],a=n[o][1],(i=Ht(t,r))&&("function"==typeof a?z(e,function(e,t){return a(e[i.index],t[i.index])}):E(a)?function(){var n=q(e,i.index),r=a[a.length-1],o=a.slice(0,a.length-1),u=o.map(function(e){return Ht(t,e)});n.forEach(function(e){e.push(Z(e,o,u))}),z(n,function(e,t){var n=e[2],a=t[2];return r(n,a)}),e.length=0,n.forEach(function(t){e.push.apply(e,W(t[1]))})}():function(){var t=X(i,a);z(e,function(e,n){return t(e[i.index],n[i.index])})}())}var Q,ee=function e(t,n,r,a){if(0===t.length)return n;var i=t[0],o=new Map;n.reduce(function(e,t){var n=t[i.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e},o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var 
d=l.value,p=K(d,2),h=p[0],v=p[1],m=e(t.slice(1),v,r,a);o.set(h,m),Array.isArray(m)&&$(m,r,a)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter(function(e){return!!Ht(n,e[0])})).length){var a=t.findIndex(function(e){return null===e[1]});a=-1!==a?a:t.length;var i=t.slice(0,a),o=t.slice(a);$(r,n,i),r=function(e,t,n,r){if(0===(n=n.filter(function(e){return null!==e[1]||(r.push(e[0]),!1)})).length)return e;r=r.map(function(e){return Ht(t,e)});var a=ee(r,e,t,n);return e.map(function(e){for(var t=0,n=a;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()})}(r,n,o,i.map(function(e){return e[0]})),e.uids=r.map(function(e){return e.pop()}),e.data=r}}function ne(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach(function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=_e.defaultReducer();return Object.keys(r).forEach(function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=_e.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ge)}),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach(function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}});var m=0;T(e._rowDiffset,function(e){var t="";l.forEach(function(n){t=t+"-"+u[n].partialField.data[e]}),void 0===p[t]?(p[t]=m,h.push({}),l.forEach(function(t){h[m][t]=u[t].partialField.data[e]}),s.forEach(function(t){h[m][t]=[u[t].partialField.data[e]]}),m+=1):s.forEach(function(n){h[p[t]][n].push(u[n].partialField.data[e])})});var y={},g=function(){return e.detachedRoot()};return h.forEach(function(e){var t=e;s.forEach(function(n){t[n]=i[n](e[n],g,y)})}),r?(r.__calculateFieldspace(),v=r):v=new 
yn(h,d,{name:c}),v}function je(e,t){var n=B(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach(function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)}),r}}function Se(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){T(e._rowDiffset,function(e){var r={},o="";a.forEach(function(n){var a=t[n].partialField.data[e];o+="-"+a,r[n]=a}),n[o]||(i.push(r),n[o]=!0)})}return e._colIdentifier.split(",").forEach(function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)}),s(e,c),s(t,f),new yn(i,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,L.LEFTOUTER)}function Fe(e,t,n){return G(t,e,n,!1,L.RIGHTOUTER)}var Ne=function(){function e(e,t){for(var n=0;nn&&(n=a))}),[t,n]}}]),t}(),He=function(){function e(e,t){for(var n=0;n9999?"+"+st(t,6):st(t,4))+"-"+st(e.getUTCMonth()+1,2)+"-"+st(e.getUTCDate(),2)+(i?"T"+st(n,2)+":"+st(r,2)+":"+st(a,2)+"."+st(i,3)+"Z":a?"T"+st(n,2)+":"+st(r,2)+":"+st(a,2)+"Z":r||n?"T"+st(n,2)+":"+st(r,2)+"Z":"")}var pt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,a=[],i=e.length,o=0,u=0,c=i<=0,f=!1;function l(){if(c)return it;if(f)return f=!1,at;var t,r,a=o;if(e.charCodeAt(a)===ot){for(;o++=i?c=!0:(r=e.charCodeAt(o++))===ut?f=!0:r===ct&&(f=!0,e.charCodeAt(o)===ut&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,_t(a))):e._derivation.push({op:t,meta:r,criteria:a})},Dt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,_t(e._ancestorDerivation).concat(_t(e._derivation)))},Ft=function(e,t,n){var r=arguments.length>3&&void 
0!==arguments[3]?arguments[3]:{},a=arguments[4];St(t,n,r,a),Dt(e,t)},Nt=(Ot(gt={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ot(gt,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ot(gt,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),gt),kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Tt=function(e,t,n){var r=[],a=[],i=wt(Nt[n].calcDiff,2),o=i[0],u=i[1];return T(e,function(e){var n=t(e);n&&o&&kt(r,e,-1),!n&&u&&kt(a,e,-1)}),{rowDiffset:r.join(","),rejectRowDiffset:a.join(",")}},Rt=function(e,t,n,r,a){var i={},o={},u={};return T(e,function(e){if(t(e)){var n="",c={keys:{}};r.forEach(function(t){var r=a[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r}),void 0===o[n]&&(o[n]=[],i[n]=-1,u[n]=c),kt(o[n],e,i[n]),i[n]=e}}),{splitRowDiffset:o,dimensionMap:u}},Ct=function(e,t,n,r,a){var i={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map(function(e){return e.formattedData()}),s=f.map(function(e){return e.data()});return a(c,function(e){return t(Et(f,l,s,e),e,o,i)},u)},Mt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map(function(e){return e.name()}).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},It=function(e,t,n){for(var r=n(e,t,0),a=1,i=e.length;a2&&void 0!==arguments[2]?arguments[2]:{},r=[],a=n.operation||V,i=n.filterByMeasure||!1,o=Mt(e),u=o.getFieldsConfig();r=t.length?t.map(function(e){return n=void 0,r=(t=e).getData(),a=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter(function(e){return e in u}),c=o.length,f=o.map(function(e){return a[e].index}),l=Object.keys(t.getFieldspace().getMeasure()).filter(function(e){return e in u}),s=t.getFieldspace().fieldsObj(),d=r.data,p=l.reduce(function(e,t){return e[t]=s[t].domain(),e},{}),h={},n=function(e,t,n){return 
t[e[n]]},c&&d.forEach(function(e){var t=It(f,e,n);h[t]=1}),n=function(e,t,n){return t[e[n]].internalValue},d.length?function(e){var t=!c||h[It(o,e,n)];return i?l.every(function(t){return e[t].internalValue>=p[t][0]&&e[t].internalValue<=p[t][1]})&&t:t}:function(){return!1};var t,n,r,a,o,c,f,l,s,d,p,h}):[function(){return!1}];return a===V?o.select(function(e){return r.every(function(t){return t(e)})},{saveChild:!1}):o.select(function(e){return r.some(function(t){return t(e)})},{saveChild:!1})},xt=function(e,t,n,r,a){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Ft(n,e,U.SELECT,{config:r},a)},Ut=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter(function(e){return-1===t.indexOf(e)})),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),Ft(e,a,U.PROJECT,{projField:t,config:n,actualProjField:i},null),a},Lt=function(e,t,n,r){return t.map(function(t){return Ut(e,t,n,r)})},Vt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},Yt=function(e){return e.map(function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,a=e.subtype,i=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(a))throw new Error("DataModel doesn't support dimension field subtype "+a+" used for "+i+" field");break;case f.MEASURE:if(-1===t.indexOf(a))throw new Error("DataModel doesn't support measure field subtype "+a+" used for "+i+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+i+" field")}}(e=Vt(e)),e})},Bt=function(e,t,n,r){n=Yt(n),r=Object.assign(Object.assign({},nt),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,n,r),c=wt(u,2),f=c[0],l=c[1];!function(e,t){e.forEach(function(e){var n=e.as;if(n){var 
r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}})}(n,f);var s=tt(l,n,f),d=F.createNamespace(s,r.name);e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"";var p=[],h=d.fields,v=h.map(function(e){return e.data()}),m=h.map(function(e){return e.formattedData()});return T(e._rowDiffset,function(e){p[e]=Et(h,m,v,e)}),d._cachedValueObjects=p,e._colIdentifier=n.map(function(e){return e.name}).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},Ht=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];if(t!==i){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach(function(t){var i=Gt(n,t);e(t,i,r,a)})}},zt=function(e){for(;e._parent&&e._derivation.find(function(e){return e.op!==U.GROUPBY});)e=e._parent;return e},Kt=function(e){for(;e._parent;)e=e._parent;return e},Wt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},Xt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],a=[];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter(function(e){return e.config.sourceId!==c}));var p=d.filter(function(e){return(r.filterFn||function(){return!0})(e,r)}).map(function(e){return e.config.criteria}),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach(function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter(function(t){return t!==e}).map(function(e){return e.config.criteria})).length&&l.push({criteria:a,models:e.model,path:Wt(e.model)}))})}a=(s=[]).concat.apply(s,[].concat(_t(p),[e])).filter(function(e){return null!==e}),l.push({criteria:a,excludeModels:[].concat(h,_t(r.excludeModels||[]))})}var 
m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=Pt(g,a,{filterByMeasure:f}),Jt(g,i,y)),l.forEach(function(e){var t=Pt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}},$t=function(e,t,n){var r=e.reduce(function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,_t(t.filter(function(e){return-1!==e.search(r)}))):r in n&&e.push(r),e},[]);return Array.from(new Set(r)).map(function(e){return e.trim()})},Qt=function(e,t){return e.numberFormat?e.numberFormat()(t):t},en=function(){function e(e,t){for(var n=0;n1?(a=e.clone(r.saveChild),xt(a,u[c[1]],e,n,t),[o,a]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=$t(e,a,r),u=void 0;i===l.ALL?u=[Ut(this,o,{mode:l.NORMAL,saveChild:t.saveChild},a),Ut(this,o,{mode:l.INVERSE,saveChild:t.saveChild},a)]:u=Ut(this,o,t,a);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce(function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e},{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach(function(e){e._parent=null}),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex(function(t){return 
t===e});-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),nn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},rn=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=Ae.apply(void 0,a);return Ft(this,i,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:_e.defaultReducer()},t),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map(function(e){return e.name}),a=[r].concat(n.data),i=new this.constructor(a,n.schema,{dataFormat:"DSVArr"});return Ft(this,i,U.SORT,t,e),t.saveChild?i.setParent(this):i.setParent(null),i}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map(function(e){return e.formattedData()}),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach(function(t,n){t[e.name()]=new k(a[n],i[n],e)});return 
n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=Vt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map(function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index}),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map(function(e){return f[e]}),s={},d=function(){return r.detachedRoot()},p=[];T(c._rowDiffset,function(e){var t=l.map(function(t){return t.partialField.data[e]});p[e]=o.apply(void 0,an(t).concat([e,d,s]))});var h=tt([p],[e],[e.name]),v=nn(h,1)[0];return c.addField(v),Ft(this,c,U.CAL_VAR,{config:e,fields:i},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=Kt(this),c=u._propagationNameSpace,f=zt(this),l={groupByModel:f,model:u};return n&&Zt(c,t,this),Xt(e,l,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&qt(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach(function(r){return r.call(n,e,t)})}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var 
r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],a=r.saveChild,i=e.getFieldspace().fieldsObj(),o=Ct(e.clone(a),n,r,e,function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},a=this.getFieldsConfig(),i=Object.keys(a),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach(function(e,n){o[n]=$t([].concat(an(e),an(t)),i,a)}),Lt(this,o,n,i)}}],[{key:"configureInvalidAwareTypes",value:function(e){return M.invalidAwareVals(e)}},{key:"Reducers",get:function(){return _e}}]),t}(),un=ye.sum,cn=ye.avg,fn=ye.min,ln=ye.max,sn=ye.first,dn=ye.last,pn=ye.count,hn=ye.std,vn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach(function(e){r=e(r),i.push.apply(i,Y(r._derivation)),a||(a=r)}),a&&a!==r&&a.dispose(),r._ancestorDerivation=[],Ft(e,r,U.COMPOSE,null,i),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;ni.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var 
e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var 
_={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function 
e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 
0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return G(t,e,n,!1,L.RIGHTOUTER)}var Te=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Ue),tt=function(){function e(e,t){for(var n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o2&&void 
0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Gt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Jt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Gt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 
0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Jt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Jt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,i=e.subtype,a=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(i))throw new Error("DataModel doesn't support dimension field subtype "+i+" used for "+a+" field");break;case f.MEASURE:if(-1===t.indexOf(i))throw new Error("DataModel doesn't support measure field subtype "+i+" used for "+a+" 
field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+a+" field")}}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var 
v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return 
this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Jt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return 
Jt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new F(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return c.addField(v),Jt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return 
this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Tn=me.last,Fn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Jt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === 
undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 
12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: 
DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = 
new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * 
@param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends 
Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * 
@class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return 
this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n 
this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use 
this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n 
/**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? 
`${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case 
DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n 
*\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr[0];\n arr.splice(0, 1)[0];\n }\n // create a map of the headers\n let headerMap = {}\n headers.map((h,i) => {\n headerMap[h] = i;\n return h\n })\n\n 
arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n let y = headerMap[schemaField];\n if (fields[y] === undefined) {\n field.push(null);\n } else {\n field.push(fields[y]);\n }\n return schemaField;\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? 
\"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return 
rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} 
[options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport 
{ detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n 
partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = 
converterFn(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === 
false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, 
union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * 
{ name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. 
It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\n\n/**\n * DataModel is an in-browser representation of tabular data. 
It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. 
Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from 
'../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataMod
el/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/defaultConverters/autoCoverter.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/expor
t.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar
","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","len","store","fields","forEach","fieldIndex","Array","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","rawValue","formattedValue","getNumberFormattedVal","defineProperties","_value","configurable","writable","_formattedValue","_internalValue","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset",
"rowAdded","rowPosition","ii","tuple","userArg","partialField","formattedData","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","dataType","sortType","retFunc","getSortFn","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","applyStandardSort","sortingDetails","fieldName","sortMeta","fieldInSchema","sortingFn","slice","f","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","sortData","dataObj","filter","sDetial","groupSortingIdx","findIndex","standardSortingDetails","groupSortingDetails","detail","sortedGroupMap","row","nextMap","applyGroupSort","uids","pop","dataBuilder","colIdentifier","addUid","columnWise","retObj","reqSorting","tmpDataArr","colName","insertInd","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","filteredValues","min","max","sqrt","mean","num","variance","defaultReducerName","ReducerStore","defReducer","entries","reducer","__unregister","delete","Function","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","rowCount","hash","_","cachedStore","cloneProvider","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","internalValue","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","PartialField","_sanitize","FieldParser","Field","subtype","description","displayName"
,"_params","_context","build","Dimension","_cachedDomain","calculateDataDomain","Measure","unit","numberFormat","CategoricalParser","isInvalid","getInvalidType","trim","Categorical","Set","domain","add","TemporalParser","_dtf","Temporal","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","dataFormat","parsedDatum","BinnedParser","matched","parseFloat","Binned","binsArr","bins","ContinuousParser","Continuous","NEGATIVE_INFINITY","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","fieldRegistry","createFields","dataColumn","headers","headersObj","header","BUILDER","createUnitField","DataConverter","_type","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","concat","formatBody","formatRows","csv","dsv","tsv","DSVArr","schemaFields","unitSchema","firstRowHeader","columnMajor","headerMap","h","schemaField","headIndex","DSVStr","fieldSeparator","d3Dsv","DSVStringConverter","FlatJSON","insertionIndex","schemaFieldsName","JSONConverter","DSVArrayConverter","Auto","converters","AutoDataConverter","DataConverterStore","_getDefaultConverters","converter","converterStore","prepareSelectionData","rawData","resp","updateFields","partialFieldspace","fieldStoreName","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistCurrentDerivation","model","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","persistDerivations","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastIn
sertedValue","li","selectRowDiffsetIterator","checker","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","getPartialFieldspace","formattedFieldsData","rawFieldsData","cloneWithAllFields","clone","calculateFieldsConfig","getKey","fn","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","getFieldsConfig","propModel","keyFn","getData","fieldsConfig","dLen","indices","fieldsSpace","v","valuesMap","present","every","select","saveChild","some","addDiffsetToClonedDm","selectConfig","cloneWithProject","projField","allFields","cloned","projectionSet","actualProjField","splitWithProject","projFieldSet","projFields","sanitizeUnitSchema","sanitizeAndValidateSchema","supportedMeasureSubTypes","supportedDimSubTypes","GEO","validateUnitSchema","updateData","relation","defaultConfig","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","_partialFieldspace","valueObjects","_cachedValueObjects","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","derivation","params","groupByString","getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","getRootGroupByModel","_parent","find","getRootDataModel","getPathToRootModel","path","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","values","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","p
ropagateImmutableActions","immutableActions","filterImmutableAction","criteriaModel","addToPropNamespace","sourceNamespace","isMutableAction","getNormalizedProFields","fieldConfig","normalizedProjField","constructor","search","Relation","source","_fieldStoreName","_propagationNameSpace","_fieldspace","joinWith","unionWith","differenceWith","defConfig","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithSelect","setParent","_fieldConfig","fieldObj","def","removeChild","sibling","parent","_onPropagation","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","ids","fill","fieldsArr","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","cachedValueObjects","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","addField","addToNameSpace","payload","eventName","measureFieldName","binFieldName","measureField","binsCount","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","clonedDMs","splitWithSelect","uniqueFields","commonFields","normalizedProjFieldSets","fieldSet","first","last","count","sd","std","Operators","compose","operations","currentDM","firstChild","dispose","bin","project","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","FieldsUtility","enums"],"mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,YAAa,GAAIH,GACE,iBAAZC,QACdA,QAAmB,UAAID,IAEvBD,EAAgB,UAAIC,IARtB,CASGK,QAAQ,WACX,O,YCTE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUP,QAGnC,IAAIC,EAASI,EAAiBE,GAAY,CACzCC,EAAGD,EACHE,GAAG,EACHT,QAAS,IAUV,OANAU,EAAQH,GAAUI,KAAKV,EAAOD,QAASC,EAAQA,EAAOD,QAASM,GAG/DL,EAAOQ,GAAI,EAGJR,EAAOD,QA0Df,OArDAM,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASd,EAASe,EAAMC,GAC3CV,EAAoBW,EAAEjB,EAASe,IAClCG,OAAOC,eAAenB,EAASe,EAAM,CAAEK,YAAY,EAA
MC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAAStB,GACX,oBAAXuB,QAA0BA,OAAOC,aAC1CN,OAAOC,eAAenB,EAASuB,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAenB,EAAS,aAAc,CAAEyB,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAAShC,GAChC,IAAIe,EAASf,GAAUA,EAAO2B,WAC7B,WAAwB,OAAO3B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAK,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,G,yjEClFrD,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,G,s0BCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCCKC,EANU,CACrBC,YAAa,cACbC,SAAU,WACVC,OAAQ,UCCGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MA
CA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,EAAG,CAECM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,EAAG,CAECvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECtG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEClH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,EAAG,CAECC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KA
AK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,EAAG,CAECG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,EAAG,CAECjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI,UAAK8F,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAX,YAAWA,IACX,kBAASA,OAAT,YAASA,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,KAAM,CAACyD,E
AAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU8C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIJ,SACAK,SACAC,SACAzE,SAcJ,OAZIqE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGvD,OAAOuD,EAAO,MACxBC,GAAO,GAGXN,EAAcE,GAEdF,EADOE,GAGOD,EAGbD,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCM,IACAzE,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkB+F,WAAa,SAAU9F,GAQrC,IAPA,IAAM+F,EAAchG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCmE,EAAgBzJ,OAAO0J,KAAK1B,GAC5B2B,EAAa,GACfrK,SACAsK,UAEItK,EAAImE,EAAOoG,QAAQL,EAAalK,EAAI,KAAO,GAC/CsK,EAAcnG,EAAOnE,EAAI,IACmB,IAAxCmK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZlE,MAAOtG,EACPyK,MAAOH,IAIf,OAAOD,GASXnG,EAAkBwG,SAAW,SAAU3G,EAAMI,GACzC,IAQIlE,EARE0K,EAAQ7G,EAAoBC,GAC5BsG,EAAanG,EAAkB+F,WAAW9F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC4E,EAAeC,OAAO1G,GACpB+F,EAAchG,EAAkBU,aAClC6F,SACAK,SACA9K,SAGJ,IAAKA,EAAI,EAAGC,EAAIoK,EAAWtE,OAAQ/F,EAAIC,EAAGD,IAEtC8K,EAAepC,EADf+B,EAAQJ,EAAWrK,GAAGyK,OACYhE,UAAUkE,GAC5CC,EAAeA,EAAajG,QAAQ,IAAIH,OAAO0F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX1G,EAAkBtC,UAAUmJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM1B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK8G,kBAAkBF,GAClCG,EAAajH,EAAkBW,wBAC/BuG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAlG,SACAvF,SACA0L,SACAC,SACA1L,SACA+H,EAAS,GAEb,IAAKuD,KAAehC,EAChB,GAAK,GAAG1H,eAAe1B,KAAKoJ,EAAegC,GAA3C,CAMA,IAJAD,EAAKvF,OAAS,EAEd0F,GADAD,EAAiBjC,EAAcgC,IACHK,OAAOJ,EAAezF,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIuL,EAAezF,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNqH,EAAQF,EAAexL,IACFO,OAGjB+K,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOnG,IAM1B,GAAI,OAFJoG,EAAcF,EAAWI,MAAMzH,KAAMkH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWtF,QAAU3B,KAAK0H,gBAAgBT,EAAWtF,QAErDiC,EAAO+D,QAAQV,EAAW,GAAI,EAAG,GAEjCrD,EAAO+D,QAAP,MAAA/D,EAAkBqD,GAGfrD,GAQX9D,EAAkBtC,UAAUsJ,kBAAoB,SAAUF,GACtD,IAYI/K,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCkE,EAAchG,EAAkBU,aAChCyF,EAAanG,EAAkB+F,WAAW9F,GAC1C6H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGArM,SAEJqM,EAAcxB,OAAO1G,GAErB,IAAMmI,EAAWjC,EAAWkC,KA
AI,SAAAC,GAAA,OAAOA,EAAI/B,SACrCgC,EAAmBpC,EAAWtE,OACpC,IAAK/F,EAAIyM,EAAmB,EAAGzM,GAAK,EAAGA,KACnCmM,EAAW9B,EAAWrK,GAAGsG,OAEV,IAAM+F,EAAYtG,OAAS,QAKdzB,IAAxB2H,IACAA,EAAsBI,EAAYtG,QAGtCqG,EAAaC,EAAYpE,UAAUkE,EAAW,EAAGF,GACjDI,EAAcA,EAAYpE,UAAU,EAAGkE,EAAW,GAC9C3H,OAAOC,OAAO2H,GACdC,EAAYpE,UAAUgE,EAAqBI,EAAYtG,QAE3DkG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKnM,EAAI,EAAGA,EAAIyM,EAAkBzM,IAC9BkM,EAAS7B,EAAWrK,GACpBqM,EAAcA,EAAY1H,QAAQuF,EAAcgC,EAAOzB,MAAO/B,EAAYwD,EAAOzB,OAAOlE,WAG5F,IAAMmG,EAAgB1B,EAAc2B,MAAM,IAAInI,OAAO6H,KAAiB,GAGtE,IAFAK,EAAcE,QAET5M,EAAI,EAAGC,EAAIqM,EAASvG,OAAQ/F,EAAIC,EAAGD,IACpCgM,EAASM,EAAStM,IAAM0M,EAAc1M,GAE1C,OAAOgM,GAQX9H,EAAkBtC,UAAUiL,cAAgB,SAAU7B,GAClD,IAAIjH,EAAO,KACX,GAAI+I,OAAOrH,SAASuF,GAChBjH,EAAO,IAAIC,KAAKgH,QACb,IAAK5G,KAAKD,QAAUH,KAAK+G,MAAMC,GAClCjH,EAAO,IAAIC,KAAKgH,OAEf,CACD,IAAM3G,EAAWD,KAAKC,SAAWD,KAAK2G,MAAMC,GACxC3G,EAAS0B,SACT3B,KAAKG,WAAL,kCAAsBP,KAAtB,c,sHAAA,CAA8BK,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUkK,gBAAkB,SAASiB,GACnD,OAAe,IAARA,GAAa3I,KAAKD,OAAOwI,MAAM,QAAQ5G,QASlD7B,EAAkBtC,UAAU8I,SAAW,SAAUvG,EAAQ6G,GACrD,IAAIzG,SAQJ,OANIyG,EACAzG,EAAaH,KAAKG,WAAaH,KAAKyI,cAAc7B,IACzCzG,EAAaH,KAAKG,cAC3BA,EAAaH,KAAKyI,cAAc7B,IAG7B9G,EAAkBwG,SAASnG,EAAYJ,ICruBnC,eAAC6I,GACZ,IAAIhN,EAAI,EACR,OAAO,WAAe,2BAAXiN,EAAW,qBAAXA,EAAW,gBAClBA,EAAOC,SAAQ,SAAC3H,EAAK4H,GACXH,EAAMG,aAAuBC,QAC/BJ,EAAMG,GAAcC,MAAMC,KAAK,CAAEtH,OAAQ/F,KAE7CgN,EAAMG,GAAY3C,KAAKjF,MAE3BvF,M,4MCdFsN,EAAe,SACfC,EAAgB7M,OAAOkB,UAAU+E,SACjC6G,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAelB,EAAKmB,GAIzB,IAHA,IAAI3N,EAAI2N,EAAU5H,OACd6H,GAAU,EAEP5N,GAAG,CACN,GAAIwM,IAAQmB,EAAU3N,GAElB,OADA4N,EAAS5N,EAGbA,GAAK,EAGT,OAAO4N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,cAAOA,MAASR,SAAgB,IAAOS,EAAP,cAAOA,MAAST,EACzC,WAGP,IAAOS,EAAP,cAAOA,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,cAAOA,MAASR,IAChBQ,EAAOC,aAAgBX,MAAQ,GAAK,IAnH5C,SAASa,EAAMH,EAAMC,EAAMC,EAAWE,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAO1D,KAAKsD,GACZK,EAAO3D,KAAKuD,KALZG,EAAS,CAACJ,GACVK,
EAAS,CAACJ,IAOVA,aAAgBX,MAChB,IAAKgB,EAAO,EAAGA,EAAOL,EAAKhI,OAAQqI,GAAQ,EAAG,CAC1C,IACIC,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,eAGA,IAAO6G,EAAP,cAAOA,MAAWhB,EACZU,QAAwB1J,IAAXgK,IACfR,EAAKM,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQE,aAAkBlB,MAAQ,GAAK,KAG3C,KADdoB,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,SAMrD,IAAKC,KAAQL,EAAM,CACf,IACIM,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,SAGJ,GAAe,OAAX6G,SAAmB,IAAOA,EAAP,cAAOA,MAAWhB,GAKrCiB,EAAMhB,EAAcpN,KAAKmO,MACbd,GACO,OAAXa,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAGxCI,IAAQd,GACE,OAAXY,GAAqBA,aAAkBjB,QACvCiB,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAI7CL,EAAKM,GAAQE,MAGhB,CACD,GAAIN,QAAwB1J,IAAXgK,EACb,SAEJR,EAAKM,GAAQE,GAIzB,OAAOR,EAiBPG,CAAMH,EAAMC,EAAMC,GACXF,GCnIJ,SAASW,EAASlJ,GACrB,OAAO6H,MAAMqB,QAAQlJ,GA2ClB,IAAMmJ,EAAc,wBAAY,IAAI1K,MAAO2K,UAAYvG,KAAKwG,MAAsB,IAAhBxG,KAAKyG,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKhJ,SAAWiJ,EAAKjJ,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAI+O,EAAKhJ,OAAQ/F,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAa1J,GACzB,OAAOA,EASJ,IAAM2J,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAKpJ,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4B6J,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CAtK,KAAK+K,KAAKK,GAAU,CAChBjP,KAAMiP,EACNvC,OAAQsC,EAERE,UAJgB,WAKZ,IAAIA,EAAYrL,KAAKsL,iBAQrB,OANKD,IACDA,EAAYrL,KAAKsL,iBAAmB,GACpCtL,KAAK6I,OAAOC,SAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,MAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBzL,KAAK0L,eAUzB,OARKD,IACDA,EAAgBzL,KAAK0L,eAAiB,GACtC1L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAU
C,UAClC8M,EAAcF,EAAMpP,QAAUoP,OAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkB9L,KAAK+L,iBAU3B,OARK/L,KAAK+L,mBACND,EAAkB9L,KAAK+L,iBAAmB,GAC1C/L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUE,YAClCkN,EAAgBP,EAAMpP,QAAUoP,OAIrCO,IAGR9L,KAAK+K,KAAKK,K,yPCqCVY,E,WAxEX,WAAanP,EAAOoP,EAAUV,I,4FAAO,SACjC,IAAMW,EAAiBC,GAAsBZ,EAAO1O,GAEpDP,OAAO8P,iBAAiBpM,KAAM,CAC1BqM,OAAQ,CACJ7P,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,SAEJ2P,gBAAiB,CACbhQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOqP,GAEXO,eAAgB,CACZjQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOoP,KAIfjM,KAAKuL,MAAQA,E,6CAkCb,OAAO9E,OAAOzG,KAAKnD,S,gCAUnB,OAAOmD,KAAKnD,Q,4BAnCZ,OAAOmD,KAAKqM,S,qCAOZ,OAAOrM,KAAKwM,kB,oCAOZ,OAAOxM,KAAKyM,mB,KCxDb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWhL,OAAS,GACDgL,EAAWE,MAAM,KACzB/D,SAAQ,SAACgE,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIpR,EAAIoR,EAAOpR,GAAKqR,EAAKrR,GAAK,EAC/BgR,EAAShR,M,6PCVvBsR,E,WAqBF,WAAarQ,I,4FAAO,SAChBmD,KAAKqM,OAASxP,E,wDAdOsQ,GACrB,OAAKA,EAGE7Q,OAAO8Q,OAAOF,EAAkBG,qBAAsBF,GAFlDD,EAAkBG,yB,mCAsB7B,OAAOrN,KAAKqM,S,iCAUZ,OAAO5F,OAAOzG,KAAKqM,W,iCAGNlL,GACb,OAAQA,aAAe+L,KAAwBA,EAAkBI,mBAAmBnM,K,qCAGlEA,GAClB,OAAOA,aAAe+L,EAAoB/L,EAAM+L,EAAkBI,mBAAmBnM,O,KAO7F+L,EAAkBK,KAAO,IAAIL,EAAkB,QAC/CA,EAAkBM,GAAK,IAAIN,EAAkB,MAC7CA,EAAkBO,IAAM,IAAIP,EAAkB,OAO9CA,EAAkBG,qBAAuB,CACrCK,QAASR,EAAkBM,GAC3BG,IAAKT,EAAkBO,IACvBG,KAAMV,EAAkBK,KACxBrN,UAAWgN,EAAkBM,IAGlBN,Q,8YC5ETW,EAAkB,SAACC,EAASd,EAAOC,GAIrC,IAHA,IAAMc,EAAU,GACZC,EAAOhB,EAEJgB,EAAOf,GACVc,EAAQ3H,KAAK4H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ3H,KAAK4H,GAEND,GAGLE,EAAkB,SAACC,EAAcrR,GAOnC,IANA,IAAIsR,EAAU,EACVC,EAAWF,EAAavM,OAAS,EACjC0M,SACA7M,SAGG2M,GAAWC,GAAU,CAIxB,GAAIvR,IAFJ2E,EAAQ0M,EADRG,EAASF,EAAUnK,KAAKsK,OAAOF,EAAWD,GAAW,KAGlCnB,OAASnQ,EAAQ2E,EAAMyL,IACtC,OAAOzL,EACA3E,GAAS2E,EAAMyL,IACtBkB,EAAUE,EAAS,EACZxR,EAAQ2E,EAAMwL,QACrBoB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,
YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,M,wHCzBF,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAI1G,OAAOC,SAAQ,SAACyC,GAChBmE,EAAOtJ,KAAKmF,EAAMI,SAASxP,SAE/BqT,EAAI3G,OAAOC,SAAQ,SAACyC,IAC6B,IAAzCmE,EAAOvJ,QAAQoF,EAAMI,SAASxP,OAC9BsT,EAAOrJ,KAAKmF,EAAMI,SAASxP,SAG5BsT,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD,wDAAxBC,EAAwB,uDAAblB,EAAMC,MACtFrD,EAAS,GACTZ,EAAO,GACPmF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAchU,KAClCoU,EAAoBF,EAAclU,KAClCA,EAAUgU,EAAchU,KAAxB,IAAgCkU,EAAclU,KAC9CqU,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CAqFpB,OAlFAN,EAActH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,OAAiB6T,IACpDU,EAAUvU,KAAUgU,EAAchU,KAAlC,IAA0CuU,EAAUvU,MAExDwP,EAAOvF,KAAKsK,MAEhBL,EAAcxH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,MAC9B6T,IACDU,EAAUvU,KAAUkU,EAAclU,KAAlC,IAA0CuU,EAAUvU,KACpDwP,EAAOvF,KAAKsK,IAGhB/E,EAAOvF,KAAKsK,MAKpBhE,EAAmBmD,EAAIc,aAAa,SAAC/U,GACjC,IAAIgV,GAAW,EACXC,SACJnE,EAAmBoD,EAAIa,aAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAActH,OAAOC,SAAQ,SAACyC,GAC1BwF,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAKnP,IACnCoV,EAAQV,GAAmB/E,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAKnP,GAClCsQ,eAAgBX,EAAM2F,gBAAgBtV,OAG9CyU,EAAcxH,OAAOC,SAAQ,SAACyC,IAC+B,IAAnDiF,EAAiBrK,QAAQoF,EAAMI,SAASxP,OAAgB6T,GAC1De,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAK+F,IAEvCE,EAAQT,GAAmBhF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAK+F,GAClC5E,eAAgBX,EAAM2F,gBAAgBJ,OAI9C,IAIMK,EAAYC,GAAgBJ,EAAQV,IACpCe,EAAYD,GAAgBJ,EAAQT,IAC1C,GAAIL,EAAmBiB,EAAWE,GALb,kBAAMxB,EAAIyB,kBACV,kBAAMxB,EAAIwB,iBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBR,EAAMjI,SAAQ,SAAC0I,EAASC,GACpBF,EAAS5F,EAAO8F,GAAKtV,MAAQqV,KAE7BZ,GAAY7B,EAAMC,QAAUiB,EAC5BlF,EAAK8F,GAAeU,GAGpBxG,EAAK3E,KAAKmL,GACVX,GAAW,EACXC,EAAcjV,QAEf,IAAKqU,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMW,EAAW,GACb5I,EAAMwH,EAActH,OAAOlH,OAAS
,EACxCoP,EAAMjI,SAAQ,SAAC0I,EAASC,GAEhBF,EAAS5F,EAAO8F,GAAKtV,MADrBsV,GAAO9I,EACsB6I,EAGA,QAGrCZ,GAAW,EACXC,EAAcjV,EACdmP,EAAK3E,KAAKmL,UAKf,IAAI3T,GAAUmN,EAAMY,EAAQ,CAAExP,SCjHzC,SAASuV,EAAW1O,EAAGO,GACnB,IAAMoO,EAAKA,GAAG3O,EACR4O,EAAKA,GAAGrO,EACd,OAAIoO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB,uDAAXL,EAIrC,OAHII,EAAInQ,OAAS,GArBrB,SAASqQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKjO,KAAKsK,OAAO4D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACNzW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EAC3ByW,EAAOzW,GAAKwW,EAAQxW,GAKxB,IAHA,IAAIoH,EAAIiP,EACJ1O,EAAI4O,EAAM,EAELvW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EACvBoH,EAAImP,GACJC,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GACEA,EAAI2O,GACXE,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,GACE+O,EAAOM,EAAOrP,GAAIqP,EAAO9O,KAAO,GACvC6O,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,IAELoP,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GAqBbsG,CAAMiI,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAInQ,OAAS,EAAGoQ,GAE1BD,E,0gBChCX,SAASQ,EAAqBC,EAAUC,GACpC,IAAMC,EAAmD,SAAvChM,OAAO+L,GAAc9Q,cAA2B,OAAS,MAC3E,OA9CJ,SAAoBgR,EAAUC,GAC1B,IAAIC,SAEJ,OAAQF,GACR,KAAKlU,EAAeC,WACpB,KAAKL,EAAiBE,SAEdsU,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAAJ,OAAUP,EAAIO,GAEd,SAACP,EAAGO,GAAJ,OAAUA,EAAIP,GAE5B,MACJ,QAEQ4P,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,EAAI,GAAK,GAGd,SAACP,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,GAAK,EAAI,GAKhC,OAAOqP,EAYAC,CAAUN,EAAS3G,KAAM6G,GAUpC,SAASK,EAAW/H,EAAMhC,GACtB,IAAMgK,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAlI,EAAKjC,SAAQ,SAACoK,GACV,IAAMC,EAAWD,EAAMnK,GACnBgK,EAAQK,IAAID,GACZF,EAAYF,EAAQtW,IAAI0W,IAAW,GAAG/M,KAAK8M,IAE3CD,EAAY7M,KAAK,CAAC+M,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYtR,OAAS,OAI5CsR,EAYX,SAASK,EAAoBC,EAAcC,EAAcC,GACrD,IAAMpO,EAAM,CACRqO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,QAAO,SAACC,EAAK5F,EAAM6F,GAE5B,OADAD,EAAI5F,GAAQuF,EAAa,GAAGpL,KAAI,SAAA+K,GAAA,OAASA,EAAM
O,EAAmBI,GAAK3R,UAChE0R,IACRvO,GAEIA,EAUX,SAASyO,EAAmB/I,EAAMY,EAAQoI,GAMtC,IALA,IAAIC,SACAC,SACA1B,SACA3W,EAAImY,EAAepS,OAAS,EAEzB/F,GAAK,EAAGA,IACXoY,EAAYD,EAAenY,GAAG,GAC9BqY,EAAWF,EAAenY,GAAG,IAC7B2W,EAAW2B,GAAcvI,EAAQqI,MVrFf,mBU4FHC,EAEXpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAU0Q,EAASjR,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAC1DmI,EAAQ4J,GAAW,WAC1B,IAAMhB,EAAcH,EAAU/H,EAAMwH,EAASrQ,OACvCiS,EAAYF,EAASA,EAAStS,OAAS,GACvC6R,EAAeS,EAASG,MAAM,EAAGH,EAAStS,OAAS,GACnD8R,EAAqBD,EAAarL,KAAI,SAAAkM,GAAA,OAAKH,GAAcvI,EAAQ0I,MAEvEpB,EAAYnK,SAAQ,SAACyK,GACjBA,EAAanN,KAAKkN,EAAmBC,EAAcC,EAAcC,OAGrE5B,EAAUoB,GAAa,SAACjQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4Q,EAAUnY,EAAGqB,MAIxB0N,EAAKpJ,OAAS,EACdsR,EAAYnK,SAAQ,SAACoK,GACjBnI,EAAK3E,KAAL,MAAA2E,EAAA,EAAamI,EAAM,QAnBG,GAqBvB,WACH,IAAMnB,EAASO,EAAoBC,EAAU0B,GAE7CpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAUwO,EAAO/O,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAH5D,IAiBf,I,EAAMoS,GAAsB,SAAtBA,EAAuBC,EAAYxJ,EAAMY,EAAQoI,GACnD,GAA0B,IAAtBQ,EAAW5S,OAAgB,OAAOoJ,EAEtC,IAAMyJ,EAAYD,EAAW,GACvBpM,EAAM,IAAI6K,IAEhBjI,EAAK4I,QAAO,SAACC,EAAKa,GACd,IAAMC,EAAOD,EAAQD,EAAUtS,OAM/B,OALI0R,EAAIR,IAAIsB,GACRd,EAAInX,IAAIiY,GAAMtO,KAAKqO,GAEnBb,EAAIP,IAAIqB,EAAM,CAACD,IAEZb,IACRzL,GAdmE,2BAgBtE,YAAuBA,EAAvB,+CAA4B,wBAAlBhL,EAAkB,KAAbgE,EAAa,KAClBwT,EAAOL,EAAoBC,EAAWH,MAAM,GAAIjT,EAAKwK,EAAQoI,GACnE5L,EAAIkL,IAAIlW,EAAKwX,GACT3L,MAAMqB,QAAQsK,IACdb,EAAkBa,EAAMhJ,EAAQoI,IApB8B,6EAwBtE,OAAO5L,GA2CJ,SAASyM,GAAUC,EAASd,GAAgB,IACzCpI,EAAiBkJ,EAAjBlJ,OAAQZ,EAAS8J,EAAT9J,KAGd,GAA8B,KAD9BgJ,EAAiBA,EAAee,QAAO,SAAAC,GAAA,QAAab,GAAcvI,EAAQoJ,EAAQ,QAC/DpT,OAAnB,CAEA,IAAIqT,EAAkBjB,EAAekB,WAAU,SAAAF,GAAA,OAA0B,OAAfA,EAAQ,MAClEC,GAAuC,IAArBA,EAAyBA,EAAkBjB,EAAepS,OAE5E,IAAMuT,EAAyBnB,EAAeK,MAAM,EAAGY,GACjDG,EAAsBpB,EAAeK,MAAMY,GAEjDlB,EAAkB/I,EAAMY,EAAQuJ,GAChCnK,EA5CJ,SAAyBA,EAAMY,EAAQoI,EAAgBQ,GAQnD,GAA8B,KAP9BR,EAAiBA,EAAee,QAAO,SAACM,GACpC,OAAkB,OAAdA,EAAO,KACPb,EAAWnO,KAAKgP,EAAO,KAChB,OAIIzT,OAAgB,OAAOoJ,EAE1CwJ,EAAaA,EAAWpM,KAAI,SAAAlM,GAAA,OAAKiY,GAAcv
I,EAAQ1P,MAEvD,IAAMoZ,EAAiBf,GAAoBC,EAAYxJ,EAAMY,EAAQoI,GACrE,OAAOhJ,EAAK5C,KAAI,SAACmN,GAIb,IAHA,IAAI1Z,EAAI,EACJ2Z,EAAUF,GAENrM,MAAMqB,QAAQkL,IAClBA,EAAUA,EAAQ9Y,IAAI6Y,EAAIf,EAAW3Y,KAAKsG,QAG9C,OAAOqT,EAAQ/M,WAuBZgN,CAAezK,EAAMY,EAAQwJ,EAAqBD,EAAuB/M,KAAI,SAAAiN,GAAA,OAAUA,EAAO,OAErGP,EAAQY,KAAO1K,EAAK5C,KAAI,SAAAmN,GAAA,OAAOA,EAAII,SACnCb,EAAQ9J,KAAOA,GCjPZ,SAAS4K,GAAa1K,EAAY0B,EAAYiJ,EAAe7B,EAAgBlN,GAKhFA,EAAUvK,OAAO8Q,OAAO,GAJL,CACfyI,QAAQ,EACRC,YAAY,GAEwBjP,GAExC,IAAMkP,EAAS,CACXpK,OAAQ,GACRZ,KAAM,GACN0K,KAAM,IAEJI,EAAShP,EAAQgP,OACjBG,EAAajC,GAAkBA,EAAepS,OAAS,EAEvDsU,EAAa,GAiDnB,GA/CgBL,EAAc/I,MAAM,KAE5B/D,SAAQ,SAACoN,GACb,IAAK,IAAIta,EAAI,EAAGA,EAAIqP,EAAWtJ,OAAQ/F,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAW+Z,EAAS,CAClCD,EAAW7P,KAAK6E,EAAWrP,IAC3B,UAMZqa,EAAWnN,SAAQ,SAACyC,GAEhBwK,EAAOpK,OAAOvF,KAAKmF,EAAMI,aAGzBkK,GACAE,EAAOpK,OAAOvF,KAAK,CACfjK,KAAM,MACNyP,KAAM,eAIdc,EAAmBC,GAAY,SAAC/Q,GAC5Bma,EAAOhL,KAAK3E,KAAK,IACjB,IAAM+P,EAAYJ,EAAOhL,KAAKpJ,OAAS,EAEvCsU,EAAWnN,SAAQ,SAACyC,EAAOuF,GACvBiF,EAAOhL,KAAKoL,GAAWrF,EAFf,GAE6BvF,EAAM0F,aAAalG,KAAKnP,MAE7Dia,IACAE,EAAOhL,KAAKoL,GAAWF,EAAWtU,QAAU/F,GAGhDma,EAAON,KAAKrP,KAAKxK,GAIboa,GAAcD,EAAOhL,KAAKoL,GAAW/P,KAAKxK,MAI9Coa,GACApB,GAASmB,EAAQhC,GAGjBlN,EAAQiP,WAAY,CACpB,IAAMM,EAAUpN,mB,sHAAAA,CAASA,MAAM+M,EAAOpK,OAAOhK,UAASwG,KAAI,iBAAM,MAChE4N,EAAOhL,KAAKjC,SAAQ,SAACiI,GACjBA,EAAMjI,SAAQ,SAACiC,EAAMnP,GACjBwa,EAAQxa,GAAGwK,KAAK2E,SAGxBgL,EAAOhL,KAAOqL,EAGlB,OAAOL,EC1EJ,SAASM,GAAYxG,EAAKC,GAC7B,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAiBX,SAAS2E,EAAkBC,EAAIvL,EAAWwL,GACtCnK,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACPD,GAAW9L,EAAK3E,KAAK2K,GACzBuF,EAA
UQ,IAAY,MASlC,OAjCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA2BtCwa,EAAkB7G,EAAK2G,GAAuB,GAC9CE,EAAkB9G,EAAK2G,GAAuB,GAEvC,IAAI5Y,GAAUmN,EAAMY,EAAQ,CAAExP,S,sPC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS2X,GAAkBlF,GACvB,OAAOA,EAAIgD,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAShD,SAAS+J,GAAKnF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMkO,EAAiBF,GAAkBlF,GAIzC,OAHiBoF,EAAevV,OACZuV,EAAevD,QAAO,SAACC,EAAKuD,GAAN,OAAevD,EAAMuD,IAAM,GAC/CjK,EAAkBK,KAG5C,OAAOL,EAAkBK,KAU7B,SAAS6J,GAAKtF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMqO,EAAWJ,GAAInF,GACfnJ,EAAMmJ,EAAInQ,QAAU,EAC1B,OAAQ+G,OAAO4O,MAAMD,IAAaA,aAAoBnK,EAC7CA,EAAkBK,KAAO8J,EAAW1O,EAEjD,OAAOuE,EAAkBK,KAgG7B,IAAMgK,YACDrY,GAAM+X,IADL,KAED9X,GAAMiY,IAFL,KAGDhY,IAzFL,SAAc0S,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAKyT,IAAL,MAAAzT,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA+EvB,KAIDlO,IAzEL,SAAcyS,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAK0T,IAAL,MAAA1T,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA8DvB,KAKDjO,IAzDL,SAAgBwS,GACZ,OAAOA,EAAI,MAmDT,KAMDvS,IA/CL,SAAeuS,GACX,OAAOA,EAAIA,EAAInQ,OAAS,MAwCtB,KAODnC,IArCL,SAAgBsS,GACZ,OAAIzH,EAAQyH,GACDA,EAAInQ,OAERuL,EAAkBK,QA0BvB,KAQD9N,IAbL,SAAcqS,GACV,OAAO9N,KAAK2T,KAbhB,SAAmB7F,GACf,IAAI8F,EAAOR,GAAItF,GACf,OAAOsF,GAAItF,EAAI3J,KAAI,SAAA0P,GAAA,gBAAQA,EAAMD,EAAS,OAWzBE,CAAShG,OAIxB,GAWAiG,GAAqB7Y,G,0PCzCnB0J,GAjGFoP,G,WACF,aAAe,Y,4FAAA,SACXhY,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK4I,MAAMyK,IAAI,aAAc4E,IAE7B3b,OAAO4b,QAAQX,IAAQzO,SAAQ,SAAC3L,GAC5B,EAAKyL,MAAMyK,IAAIlW,EAAI,GAAIA,EAAI,O,oDAc/B,IAAK,UAAOwE,OACR,OAAO3B,KAAK4I,MAAMnM,IAAI,cAG1B,IAAI0b,EAAUA,UAAVA,8BAEJ,GAAuB,mBAAZA,EACPnY,KAAK4I,MAAMyK,IAAI,aAAc8E,OAC1B,CAEH,GADAA,
EAAU1R,OAAO0R,IAC6B,IAA1C7b,OAAO0J,KAAKuR,IAAQpR,QAAQgS,GAG5B,MAAM,IAAI1H,MAAJ,WAAqB0H,EAArB,0BAFNnY,KAAK4I,MAAMyK,IAAI,aAAckE,GAAOY,IAK5C,OAAOnY,O,+BAmCD7D,EAAMgc,GAAS,WACrB,GAAuB,mBAAZA,EACP,MAAM,IAAI1H,MAAM,gCAMpB,OAHAtU,EAAOsK,OAAOtK,GACd6D,KAAK4I,MAAMyK,IAAIlX,EAAMgc,GAEd,WAAQ,EAAKC,aAAajc,M,mCAGvBA,GACN6D,KAAK4I,MAAMwK,IAAIjX,IACf6D,KAAK4I,MAAMyP,OAAOlc,K,8BAIjBA,GACL,OAAIA,aAAgBmc,SACTnc,EAEJ6D,KAAK4I,MAAMnM,IAAIN,O,KAgBfoc,IARO,QAHd3P,GAAQ,QAIJA,GAAQ,IAAIoP,IAETpP,I,+YC5Cf,SAAS4P,GAASC,EAAWtN,EAAUuN,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWtN,GAC7B,IAAMsE,EAAS,GAEToJ,EADaJ,EAAUrI,gBACCvE,eAY9B,OAVAvP,OAAO4b,QAAQW,GAAY/P,SAAQ,YAAW,IAAT3L,EAAS,WACtCgO,GAAYA,EAASxJ,QACU,IAA3BwJ,EAAShF,QAAQhJ,IACjBsS,EAAOrJ,KAAKjJ,GAGhBsS,EAAOrJ,KAAKjJ,MAIbsS,EAyCWqJ,CAAYL,EAAWtN,GACnC4N,EAhCV,SAAwBN,GAA0B,IAAfC,EAAe,uDAAJ,GACpC3C,EAAS,GAETiD,EADaP,EAAUrI,gBACD5E,aACtByM,EAAaM,GAAaU,iBAchC,OAZA3c,OAAO0J,KAAKgT,GAAUlQ,SAAQ,SAACoQ,GACU,iBAA1BR,EAASQ,KAChBR,EAASQ,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYb,GAAac,QAAQX,EAASQ,IAC5CE,EACArD,EAAOmD,GAAeE,GAEtBrD,EAAOmD,GAAejB,EACtBS,EAASQ,GAAenB,OAGzBhC,EAcYuD,CAAcb,EAAWC,GACtCzN,EAAawN,EAAUrI,gBACvBmJ,EAAgBtO,EAAWI,YAC3BmO,EAASvO,EAAW9O,KACpBsd,EAAe,GACfC,EAAa,GACb/N,EAAS,GACToH,EAAU,GACVhI,EAAO,GACT4O,SAGJrd,OAAO4b,QAAQqB,GAAezQ,SAAQ,YAAkB,cAAhB3L,EAAgB,KAAXN,EAAW,KACpD,IAAgC,IAA5B+b,EAAUzS,QAAQhJ,IAAe4b,EAAW5b,GAG5C,OAFAwO,EAAOvF,KAAKqD,EAAQ,GAAI5M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKlN,EAAUC,QACX+a,EAAWtT,KAAKjJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAarT,KAAKjJ,OAK9B,IAAIyc,EAAW,EACflN,EAAmB+L,EAAU9H,aAAa,SAAC/U,GACvC,IAAIie,EAAO,GACXJ,EAAa3Q,SAAQ,SAACgR,GAClBD,EAAUA,EAAV,IAAkBN,EAAcO,GAAG7I,aAAalG,KAAKnP,WAEnCsE,IAAlB6S,EAAQ8G,IACR9G,EAAQ8G,GAAQD,EAChB7O,EAAK3E,KAAK,IACVqT,EAAa3Q,SAAQ,SAACgR,GAClB/O,EAAK6O,GAAUE,GAAKP,EAAcO,GAAG7I,aAAalG,KAAKnP,MAE3D8d,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAK6O,GAAUE,GAAK,CAACP,EAAcO,GAAG7I,aAAalG,KAAKnP,OAE5Dge,GAAY,GAEZF,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAKgI,EAAQ8G,IAAOC,GAAG1T,KAAKmT,EAAcO,GAAG7I,aAAalG,KAAKnP,UAM3E,IA
AIme,EAAc,GACdC,EAAgB,kBAAMvB,EAAUnH,gBAcpC,OAbAvG,EAAKjC,SAAQ,SAACwM,GACV,IAAMvE,EAAQuE,EACdoE,EAAW5Q,SAAQ,SAACgR,GAChB/I,EAAM+I,GAAKf,EAAWe,GAAGxE,EAAIwE,GAAIE,EAAeD,SAGpDpB,GACAA,EAAkBsB,wBAClBN,EAAehB,GAGfgB,EAAe,IAAI/b,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMqd,IAEhDG,EC9HJ,SAASO,GAAmBrK,EAAKC,GACpC,IAIMqK,EAAkB7K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACe,EAAWE,GACf,IAAI+I,GAAc,EASlB,OARAD,EAAgBrR,SAAQ,SAACkL,GAGjBoG,IAFAjJ,EAAU6C,GAAWqG,gBACrBhJ,EAAU2C,GAAWqG,gBAAiBD,MAMvCA,GCjBR,SAASE,GAAOzK,EAAKC,GACxB,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAgBX,SAAS2E,EAAmBC,EAAIvL,GAC5BqB,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACX/L,EAAK3E,KAAK2K,GACVuF,EAAUQ,IAAY,MASlC,OAhCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA0BtCwa,EAAkB9G,EAAK2G,GACvBG,EAAkB7G,EAAK2G,GAEhB,IAAI7Y,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASoe,GAAeC,EAAYC,EAAY1K,GACnD,OAAOH,EAAa4K,EAAYC,EAAY1K,GAAU,EAAOhB,EAAME,WAGhE,SAASyL,GAAgBF,EAAYC,EAAY1K,GACpD,OAAOH,EAAa6K,EAAYD,EAAYzK,GAAU,EAAOhB,EAAMG,Y,8PCFlDyL,G,WAUjB,WAAaxe,EAAM4O,EAAMY,EAAQvJ,I,4FAAQ,SACrCpC,KAAK7D,KAAOA,EACZ6D,KAAK2L,OAASA,EACd3L,KAAKoC,OAASA,EACdpC,KAAK+K,KAAO/K,KAAK4a,UAAU7P,G,6CAUpBA,GAAM,WACb,OAAOA,EAAK5C,KAAI,SAAA+K,GAAA,OAAS,EAAK9Q,OAAOuE,MAAMuM,EAAO,CAAEnT,OAAS,EAAK4L,OAAO5L,gB,+PC3B5D8a,G,yKAQb,MAAM,IAAIpK,MAAM,2B,+PCSHqK,G,WAQjB,WAAa7J,EAActE,I,4FAAY,SACnC3M,KAAKiR,aAAeA,EACpBjR,KAAK2M,WAAaA,E,4CAclB,MAAM,IAAI8D,MAAM,yB,+BAUhB,OAAOzQ,KAAKiR,aAAatF,S,6BAUzB,OAAO3L,KAAKiR,aAAa9U,O,6BAUzB,OAAO6D,KAAKiR,aAAatF,OAAOC,O,gCAUhC,OAAO5L,KAAKiR,aAAatF,OAAOoP,U,oCAUhC,OAAO/a,KAAKiR,a
AAatF,OAAOqP,c,oCAUhC,OAAOhb,KAAKiR,aAAatF,OAAOsP,aAAejb,KAAKiR,aAAatF,OAAOxP,O,6BASpE,WACE4O,EAAO,GAIb,OAHA2B,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjCmP,EAAK3E,KAAK,EAAK6K,aAAalG,KAAKnP,OAE9BmP,I,sCAUP,MAAM,IAAI0F,MAAM,0B,gCA9FhB,MAAM,IAAIA,MAAM,yB,8BAsIhB,MApCgB,CACZyK,QAAU,GACVC,SAAWnb,KACXgU,UAAY,SAAS7X,GAEjB,OADA6D,KAAKkb,QAAQ/e,KAAOA,EACb6D,MAEX2L,OAAS,SAASA,GAEd,OADA3L,KAAKkb,QAAQvP,OAASA,EACf3L,MAEX+K,KAAO,SAASA,GAEZ,OADA/K,KAAKkb,QAAQnQ,KAAOA,EACb/K,MAEXiR,aAAe,SAASA,GAEpB,OADAjR,KAAKkb,QAAQjK,aAAeA,EACrBjR,MAEX2M,WAAa,SAASA,GAElB,OADA3M,KAAKkb,QAAQvO,WAAaA,EACnB3M,MAEXob,MAAQ,WACJ,IAAInK,EAAe,KACnB,GAAGjR,KAAKkb,QAAQjK,wBAAwB0J,GACpC1J,EAAejR,KAAKkb,QAAQjK,iBAC1B,KAAGjR,KAAKkb,QAAQvP,SAAU3L,KAAKkb,QAAQnQ,KAIzC,MAAM,IAAI0F,MAAM,4BAHhBQ,EAAe,IAAI0J,GAAa3a,KAAKkb,QAAQ/e,KAAM6D,KAAKkb,QAAQnQ,KAAM/K,KAAKkb,QAAQvP,OAAQ3L,KAAKmb,SAAS/Y,UAK7G,OAAO,IAAIpC,KAAKmb,SAASlK,EAAajR,KAAKkb,QAAQvO,kB,+PC/J9C0O,G,stBAYb,OAHKrb,KAAKsb,gBACNtb,KAAKsb,cAAgBtb,KAAKub,uBAEvBvb,KAAKsb,gB,4CAUZ,MAAM,IAAI7K,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAjCmB+P,I,0PCElBU,G,stBAYb,OAHKxb,KAAKsb,gBACNtb,KAAKsb,cAAgBtb,KAAKub,uBAEvBvb,KAAKsb,gB,6BAUZ,OAAOtb,KAAKiR,aAAatF,OAAO8P,O,iCAUhC,OAAOzb,KAAKiR,aAAatF,OAAOwN,UAAYpB,K,qCAShC,IACJ2D,EAAiB1b,KAAKiR,aAAatF,OAAnC+P,aACR,OAAOA,aAAwBpD,SAAWoD,EAAe7Q,I,4CAUzD,MAAM,IAAI4F,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAhEiB+P,I,0PCDhBa,G,mtBAQVxa,GAQH,OALK+L,EAAkB0O,UAAUza,GAGpB+L,EAAkB2O,eAAe1a,GAFjCsF,OAAOtF,GAAK2a,W,GAZcjB,I,0PCC1BkB,G,utBASb,OAAO3d,EAAiBC,c,4CAUL,WACbwb,EAAO,IAAImC,IACXC,EAAS,GAUf,OAPAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAGb+I,K,gCAIP,OAAO,IAAIN,O,GAnCsBN,I,0PCApBc,G,mtBAoBVhb,E,GAAkB,IAAXpB,EAAW,EAAXA,OACN6D,SAKJ,GAHI5D,KAAKoc,OACLpc,KAAKoc,KAAO,IAAItc,EAAkBC,IAEjCmN,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIhB,EAAaH,KAAKoc,KAAK3T,cAActH,GACzCyC,EAASzD,EAAaA,EAAWoK,UAAY2C,EAAkBM,GAInE,OAAO5J,M,GAhC6BiX,I,0PCEvBwB,G,YAQjB,WAAapL,EAActE,I,4FAA
Y,e,iKAAA,wDAC7BsE,EAActE,IADe,OAGnC,EAAK2P,eAAiB,KAHa,E,wXAahB,WACbzC,EAAO,IAAImC,IACXC,EAAS,GAYf,OARAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAIb+I,I,qDAWP,GAAIjc,KAAKsc,eACL,OAAOtc,KAAKsc,eAUhB,IAPA,IAAMC,EAAavc,KAAK+K,OAAO+J,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAAoB8E,MAAK,SAAChP,EAAGO,GAAJ,OAAUP,EAAIO,KACjGiZ,EAAQD,EAAW5a,OACrB8a,EAAU/T,OAAOgU,kBACjBC,SACAC,SACAC,EAAiB,EAEZjhB,EAAI,EAAGA,EAAI4gB,EAAO5gB,IACvB+gB,EAAYJ,EAAW3gB,EAAI,IAC3BghB,EAAYL,EAAW3gB,MAEL+gB,IAIlBF,EAAUzY,KAAKyT,IAAIgF,EAASG,EAAYL,EAAW3gB,EAAI,IACvDihB,KAQJ,OALKA,IACDJ,EAAU,MAEdzc,KAAKsc,eAAiBG,EAEfzc,KAAKsc,iB,+BAUZ,OAAOtc,KAAKiR,aAAatF,OAAO5L,S,sCAUnB,WACPgL,EAAO,GACP+R,EAAa9c,KAAKD,SAaxB,OAXA2M,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAErC,GAAIsR,EAAkB0O,UAAU1I,KAAY4J,GAAcpU,OAAOrH,SAAS6R,GAAS,CAE/E,IAAM6J,EAAc7P,EAAkB2O,eAAe3I,IAAUA,EAC/DnI,EAAK3E,KAAK2W,QAEVhS,EAAK3E,KAAKtG,EAAkBwG,SAAS4M,EAAO4J,OAG7C/R,K,gCAIP,OAAO,IAAIoR,O,GAjHmBd,I,0PCHjB2B,G,mtBAQV7b,GAEHA,EAAMsF,OAAOtF,GACb,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAK7ByC,EAASsJ,EAAkB2O,eAAe1a,OALP,CACnC,IAAI8b,EAAU9b,EAAIoH,MALR,2DAMV3E,EAASqZ,EAAavU,OAAOwU,WAAWD,EAAQ,IAAvC,IAA8CvU,OAAOwU,WAAWD,EAAQ,IAC9D/P,EAAkBM,GAIzC,OAAO5J,M,GApB2BiX,I,0PCArBsC,G,muBASb,IAAMC,EAAUpd,KAAKiR,aAAatF,OAAO0R,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQzb,OAAS,M,6BAU7C,OAAO3B,KAAKiR,aAAatF,OAAO0R,Q,gCAIhC,OAAO,IAAIL,O,GAxBiB3B,I,0PCAfiC,G,mtBAQVnc,GACH,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIC,EAAY8b,WAAW/b,EAAK,IAChCyC,EAAS8E,OAAO4O,MAAMlW,GAAa8L,EAAkBM,GAAKpM,EAI9D,OAAOwC,M,GAjB+BiX,I,0PCGzB0C,G,utBASb,OAAO/e,EAAeC,a,4CAUH,WACfgZ,EAAM/O,OAAOgU,kBACbhF,EAAMhP,OAAO8U,kBAiBjB,OAdA9Q,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GACjCsX,aAAiBhG,IAIjBgG,EAAQuE,IACRA,EAAMvE,GAENA,EAAQwE,IACRA,EAAMxE,OAIP,CAACuE,EAAKC,M,gCAId,OAAO,IAAI4F,O,GA1CsB9B,I,0PCNlCiC,G,WACF,c,4FAAa,SACTzd,KAAK0d,WAAc,IAAI1
K,I,qDAGT+H,EAAQ4C,GAEtB,OADA3d,KAAK0d,WAAWrK,IAAI0H,EAAQ4C,GACrB3d,O,0BAGP4L,GACA,OAAO5L,KAAK0d,WAAWtK,IAAIxH,K,0BAG3BA,GACA,OAAO5L,KAAK0d,WAAWjhB,IAAImP,O,KAI7BgS,GAAyB,SAAChV,GAC5BA,EACCiV,kBAAkBzf,EAAiBC,YAAY0d,IAC/C8B,kBAAkBzf,EAAiBE,SAAS+d,IAC5CwB,kBAAkBzf,EAAiBG,OAAO4e,IAC1CU,kBAAkBrf,EAAeC,WAAW8e,KAgBlCO,GAbQ,WACnB,IAAIlV,EAAQ,KASZ,OANkB,OAAVA,IACAA,EAAQ,IAAI6U,GACZG,GAAsBhV,IAEnBA,EARQ,GCiFhB,SAASmV,GAAaC,EAAYrS,EAAQsS,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQtc,SACrBsc,EAAUtS,EAAOxD,KAAI,SAAA6B,GAAA,OAAQA,EAAK7N,SAGtC8hB,EAAQnV,SAAQ,SAACqV,EAAQviB,GACrBsiB,EAAWC,GAAUviB,KAGlB+P,EAAOxD,KAAI,SAAA6B,GAAA,OArEtB,SAAyBe,EAAMY,GAG3B,OAFAZ,EAAOA,GAAQ,GAEZ+S,GAAc1K,IAAIzH,EAAOoP,SACjB+C,GAAcrhB,IAAIkP,EAAOoP,SACvBqD,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALF,MAKkB5B,EAAKpJ,OAAS,IAC9ByZ,QAEF0C,GAAcrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACjG+f,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALF,MAKkB5B,EAAKpJ,OAAS,IAC9ByZ,QAmDaiD,CAAgBL,EAAWE,EAAWlU,EAAK7N,OAAQ6N,MC5HlE,QACX8S,WAAY/e,EAAWI,M,0PCANmgB,G,WACjB,WAAY1S,I,4FAAK,SACb5L,KAAKue,MAAQ3S,E,2CAOTb,EAAKY,EAAO9E,GAChB,MAAM,IAAI4J,MAAM,qC,2BAJhB,OAAOzQ,KAAKue,U,KCThBC,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBC,GACvB,OAAO,IAAIxG,SAAS,IAAK,WAAawG,EAAQ3W,KAAI,SAAShM,EAAMP,GAC/D,OAAOmjB,KAAKC,UAAU7iB,GAAQ,OAASP,EAAI,OAC1CqH,KAAK,KAAO,KAWjB,SAASgc,GAAaC,GACpB,IAAIC,EAAY7iB,OAAOY,OAAO,MAC1B4hB,EAAU,GAUd,OARAI,EAAKpW,SAAQ,SAASwM,GACpB,IAAK,IAAI8J,KAAU9J,EACX8J,KAAUD,GACdL,EAAQ1Y,KAAK+Y,EAAUC,GAAUA,MAKhCN,EAGT,SAASjf,GAAIhD,EAAOwiB,GAClB,IAAI1hB,EAAId,EAAQ,GAAI8E,EAAShE,EAAEgE,OAC/B,OAAOA,EAAS0d,EAAQ,IAAIrW,MAAMqW,EAAQ1d,EAAS,GAAGsB,KAAK,GAAKtF,EAAIA,EAStE,SAAS2hB,GAAW3f,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAK4f,cACbC,EAAU7f,EAAK8f,gBACfC,EAAU/f,EAAKggB,gBACfC,EAAejgB,EAAKkgB,qBACxB,OAAOvI,MAAM3X,GAAQ,iBAXHwE,EAYDxE,EAAKmgB,kBAXR,EAAI,IAAMjgB,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAKogB,cAAgB,EAAG,GAAK,IAAMlgB,GAAIF,EA
AKqgB,aAAc,IAC1GJ,EAAe,IAAM/f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAAM7f,GAAI+f,EAAc,GAAK,IACnHF,EAAU,IAAM7f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAChFF,GAAWhd,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IACjE,IAGO,gBAASS,GACtB,IAAIC,EAAW,IAAI9f,OAAO,KAAQ6f,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAU/f,EAAM+T,GACvB,IAIIvX,EAJAoiB,EAAO,GACPoB,EAAIhgB,EAAKqB,OACT4e,EAAI,EACJljB,EAAI,EAEJmjB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAASpa,IACP,GAAIma,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAI5iB,EAAUK,EAAPykB,EAAIH,EACX,GAAIjgB,EAAK8f,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAKhgB,EAAK8f,WAAWG,KAAO7B,IAASpe,EAAK8f,aAAaG,KAAO7B,KAI3E,OAHK9iB,EAAI2kB,IAAMD,EAAGE,GAAM,GACdvkB,EAAIqE,EAAK8f,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9CxkB,IAAM2iB,KAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,GACpEjgB,EAAK8T,MAAMsM,EAAI,EAAG9kB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOggB,EAAID,GAAG,CACZ,IAAKrkB,EAAIqE,EAAK8f,WAAWxkB,EAAI2kB,QAAU5B,GAAS8B,GAAM,OACjD,GAAIxkB,IAAM2iB,GAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,OACtE,GAAItkB,IAAMkkB,EAAW,SAC1B,OAAO7f,EAAK8T,MAAMsM,EAAG9kB,GAIvB,OAAO4kB,GAAM,EAAMlgB,EAAK8T,MAAMsM,EAAGJ,GAGnC,IA7BIhgB,EAAK8f,WAAWE,EAAI,KAAO3B,MAAW2B,EACtChgB,EAAK8f,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjCxjB,EAAIuJ,OAAaoY,IAAK,CAE5B,IADA,IAAInJ,EAAM,GACHxY,IAAM0hB,IAAO1hB,IAAM2hB,IAAKnJ,EAAIlP,KAAKtJ,GAAIA,EAAIuJ,IAC5CgO,GAA4B,OAAtBiB,EAAMjB,EAAEiB,EAAKjY,OACvB6hB,EAAK9Y,KAAKkP,GAGZ,OAAO4J,EAGT,SAASyB,EAAczB,EAAMJ,GAC3B,OAAOI,EAAK/W,KAAI,SAASmN,GACvB,OAAOwJ,EAAQ3W,KAAI,SAASiX,GAC1B,OAAOwB,EAAYtL,EAAI8J,OACtBnc,KAAKgd,MAkBZ,SAASY,EAAUvL,GACjB,OAAOA,EAAInN,IAAIyY,GAAa3d,KAAKgd,GAGnC,SAASW,EAAY/jB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiB+C,KAAO0f,GAAWziB,GACnCqjB,EAASY,KAAKjkB,GAAS,IAAM,IAAOA,EAAM0D,QAAQ,KAAM,MAAU,IAClE1D,EAGR,MAAO,CACL8J,MA5FF,SAAerG,EAAM+T,GACnB,IAAI0M,EAASjC,EAASI,EAAOmB,EAAU/f,GAAM,SAASgV,EAAK1Z,GACzD,GAAImlB,EAAS,OAAOA,EAAQzL,EAAK1Z,EAAI,GACrCkjB,EAAUxJ,EAAKyL,EAAU1M,EAtD/B,SAAyByK,EAASzK,GAChC,IAAI/W,EAASuhB
,GAAgBC,GAC7B,OAAO,SAASxJ,EAAK1Z,GACnB,OAAOyY,EAAE/W,EAAOgY,GAAM1Z,EAAGkjB,IAmDMkC,CAAgB1L,EAAKjB,GAAKwK,GAAgBvJ,MAGzE,OADA4J,EAAKJ,QAAUA,GAAW,GACnBI,GAuFPmB,UAAWA,EACXtgB,OA5BF,SAAgBmf,EAAMJ,GAEpB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrC,CAACJ,EAAQ3W,IAAIyY,GAAa3d,KAAKgd,IAAYgB,OAAON,EAAczB,EAAMJ,IAAU7b,KAAK,OA2B5Fie,WAxBF,SAAoBhC,EAAMJ,GAExB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrCyB,EAAczB,EAAMJ,GAAS7b,KAAK,OAuBzCke,WApBF,SAAoBjC,GAClB,OAAOA,EAAK/W,IAAI0Y,GAAW5d,KAAK,SC1IhCme,GAAMC,GAAI,KCAVC,IDEkBF,GAAIza,MACAya,GAAIf,UACPe,GAAIrhB,OACAqhB,GAAIF,WACJE,GAAID,WCNrBE,GAAI,OAEQC,GAAI3a,MACA2a,GAAIjB,UACPiB,GAAIvhB,OACAuhB,GAAIJ,WACJI,GAAIH,WC8ChBI,OAnCf,SAAgBzP,EAAKnG,EAAQ9E,GACzB,IAAKmC,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAEpB,IAGM+Q,EAAe7V,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QACzD0K,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,GAGuB7a,GAE3C,IAAMiY,EAAU,GACV1Y,EAAOub,EAAY7C,GAErBb,EAAUuD,EACV3a,EAAQ6a,iBAGRzD,EAAUnM,EAAItK,OAAO,EAAG,GAAG,IAG/B,IAAMoa,EAAY3D,EAAQtK,QAAO,SAACC,EAAKiO,EAAGjmB,GAAT,OAC7BU,OAAO8Q,OAAOwG,G,EAAYhY,G,EAAJimB,K,EAAtB,I,sGACD,IAUH,OARA/P,EAAIhJ,SAAQ,SAACD,GACT,IAAM0C,EAAQ,GAKd,OAJAiW,EAAa1Y,SAAQ,SAACgZ,GAClB,IAAMC,EAAYH,EAAUE,GAC5BvW,EAAMnF,KAAKyC,EAAOkZ,OAEf3b,eAAQmF,MAEZ,CAACiW,EAAc1C,IChBXkD,OAXf,SAAiB7X,EAAKwB,EAAQ9E,GAK1BA,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,EAChBO,eAAgB,KAEuBpb,GAE3C,IAAMwa,EAAMa,GAAMrb,EAAQob,gBAC1B,OAAOV,GAAOF,EAAIhB,UAAUlW,GAAMwB,EAAQ9E,I,0PC5BzBsb,G,YACjB,aAAa,O,4FAAA,S,iKAAA,wDACHpkB,EAAWE,U,0WAGb8M,EAAOY,EAAS9E,GACpB,OAAOmb,GAAOjX,EAAKY,EAAO9E,O,GANcyX,ICqDjC8D,OA7Bf,SAAmBtQ,EAAKnG,GACpB,IAAK3C,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAGpB,IAAM0N,EAAS,GACXviB,EAAI,EACJymB,SACEvD,EAAU,GACV1Y,EAAOub,EAAY7C,GACnBwD,EAAmB3W,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QAgB7D,OAdA2V,EAAIhJ,SAAQ,SAACkB,GACT,IAAMnB,EAAS,GACfyZ,EAAiBxZ,SAAQ,SAAC2Y,GAClBA,KAActD,EACdkE,EAAiBlE,EAAOsD,IAExBtD,EAAOsD,GAAc7lB,IACrBymB,EAAiBzmB,EAAI,GAEzBiN,EAAOwZ,GAAkBrY,EAAKyX,MAElCrb,eAAQyC,MAGL,CAACvM,OAAO0J,KAAKmY,GAASW,I,0PClDZyD,G,YACjB,aAAa,O,4FAAA,S,iK
AAA,wDACHxkB,EAAWC,Y,0WAGb+M,EAAOY,EAAS9E,GACpB,OAAOub,GAASrX,EAAKY,EAAO9E,O,GANOyX,I,0PCAtBkE,G,YACjB,aAAa,O,4FAAA,S,iKAAA,wDACHzkB,EAAWG,U,0WAGb6M,EAAOY,EAAS9E,GACpB,OAAO0a,GAAOxW,EAAKY,EAAO9E,O,GANayX,ICmBhCmE,OAXf,SAAe1X,EAAMY,EAAQ9E,GACzB,IAAM6b,EAAa,CAAEN,YAAUJ,UAAQT,WACjCzE,EAAahS,EAAiBC,GAEpC,IAAK+R,EACD,MAAM,IAAIrM,MAAM,mCAGpB,OAAOiS,EAAW5F,GAAY/R,EAAMY,EAAQ9E,I,0PChB3B8b,G,YACjB,aAAa,O,4FAAA,S,iKAAA,wDACH5kB,EAAWI,O,0WAGb4M,EAAOY,EAAS9E,GACpB,OAAO1I,GAAK4M,EAAKY,EAAO9E,O,GANeyX,I,6PCEzCsE,G,WACF,c,4FAAa,SACT5iB,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK0iB,WAAW1iB,KAAK6iB,yB,2DAIrB,MAAO,CACH,IAAIV,GACJ,IAAIK,GACJ,IAAID,GACJ,IAAII,M,iCASDD,GAAW,WAIlB,OAHGA,EAAW/gB,QACV+gB,EAAW5Z,SAAQ,SAAAga,GAAA,OAAa,EAAKla,MAAMyK,IAAIyP,EAAUlX,KAAKkX,MAE3D9iB,KAAK4I,Q,+BAQPka,GAIL,OAHGA,aAAqBxE,IACpBte,KAAK4I,MAAMyK,IAAIyP,EAAUlX,KAAKkX,GAE3B9iB,O,iCASA8iB,GAEP,OADA9iB,KAAK4I,MAAMyP,OAAOyK,EAAUlX,MACrB5L,O,0BAGP7D,GACA,OAAG6D,KAAK4I,MAAMwK,IAAIjX,GACP6D,KAAK4I,MAAMnM,IAAIN,GAEnB,S,KAiBA4mB,GAZS,WACpB,IAAIna,EAAQ,KAQZ,OALkB,OAAVA,IACAA,EAAQ,IAAIga,IAETha,EAPS,G,ioBClDxB,SAASoa,GAAsBna,EAAQqI,EAAe+R,EAASrnB,GAC3D,IAAMsnB,EAAO,GADiD,uBAG9D,YAA2Bra,EAAOqP,UAAlC,+CAA6C,yBAAjC/a,EAAiC,KAA5BoO,EAA4B,KACzC2X,EAAK3X,EAAMpP,QAAU,IAAI6P,EAAMkF,EAAc/T,GAAKvB,GAAIqnB,EAAQ9lB,GAAKvB,GAAI2P,IAJb,6EAM9D,OAAO2X,EAGJ,SAAS9R,GAAiBvI,GAC7B,IAAMqa,EAAO,GAEb,IAAK,IAAM/lB,KAAO0L,EACdqa,EAAK/lB,GAAO,IAAI6O,EAAMnD,EAAO1L,GAAK+O,eAAgBrD,EAAO1L,GAAK8O,SAAU9O,GAE5E,OAAO+lB,EAGJ,IAAMC,GAAe,SAAC,EAA6BC,EAAmBC,GAAmB,cAAlE1W,EAAkE,KAAtDiJ,EAAsD,KACxF0N,EAAS1N,EAAcjU,OAASiU,EAAc/I,MAAM,KAAO,GAC3D0W,EAAkBH,EAAkB/X,YACpCmY,EAAYF,EAAOnb,KAAI,SAAAsb,GAAA,OfoDxB,SAAoCxS,EAActE,GAAY,IACzDhB,EAAWsF,EAAXtF,OAER,OAAGmS,GAAc1K,IAAIzH,EAAOoP,SACjB+C,GAAcrhB,IAAIkP,EAAOoP,SACvBqD,QACAnN,aAAaA,GACbtE,WAAWA,GACXyO,QAEF0C,GAAcrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACjG+f,QACAnN,aAAaA,GACbtE,WAAWA,GACXyO,QelEsBsI,CAA2BH,EAAgBE,GAAMxS,aAActE,MAClG,OAAO1B,EAAWC,gBAAgBsY,EAAWH,IAGpCM,GAA2B,SAACC,EAAOC,GAAuC,IACzC,EADa
1W,EAA4B,uDAAnB,GAAI2W,EAAe,aAC/ED,IAActV,EAAeI,SAC7BiV,EAAMG,YAAYpiB,OAAS,GAC3B,EAAAiiB,EAAMG,aAAY3d,KAAlB,WAA0B0d,KAE1BF,EAAMG,YAAY3d,KAAK,CACnB4d,GAAIH,EACJI,KAAM9W,EACN+W,SAAUJ,KAITK,GAA4B,SAACC,EAAUC,GAAU,OAC1D,EAAAA,EAAMC,qBAAoBle,KAA1B,WAAkCge,EAASE,qBAA3C,UAAmEF,EAASL,gBAGnEQ,GAAqB,SAACH,EAAUR,EAAOC,GAAuC,IAA5B1W,EAA4B,uDAAnB,GAAI2W,EAAe,aACvFH,GAAyBC,EAAOC,EAAW1W,EAAQ2W,GACnDK,GAA0BC,EAAUR,IAGlCY,aACD3lB,EAAcC,OAAS,CACpB2lB,UAAW,CAAC,cACZC,SAAU,EAAC,GAAM,KAHnB,MAKD7lB,EAAcE,QAAU,CACrB0lB,UAAW,CAAC,oBACZC,SAAU,EAAC,GAAO,KAPpB,MASD7lB,EAAcG,IAAM,CACjBylB,UAAW,CAAC,aAAc,oBAC1BC,SAAU,EAAC,GAAM,KAXnB,IAeAC,GAAqB,SAAChY,EAAY/Q,EAAGgpB,GACvC,IAA2B,IAAvBA,GAA4BhpB,IAAOgpB,EAAoB,EAAI,CAC3D,IAAMC,EAAKlY,EAAWhL,OAAS,EAE/BgL,EAAWkY,GAASlY,EAAWkY,GAAIhY,MAAM,KAAK,GAA9C,IAAoDjR,OAEpD+Q,EAAWvG,KAAX,GAAmBxK,IAIdkpB,GAA2B,SAACnY,EAAYoY,EAAShoB,GAC1D,IAEMioB,EAAgB,GAChBC,EAAgB,GAJ6C,KAM9BT,GAAcznB,GAAM2nB,SANU,GAM5DQ,EAN4D,KAM9CC,EAN8C,KAanE,OALAzY,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMwpB,EAAgBL,EAAQnpB,GAC9BwpB,GAAiBF,GAAgBP,GAAmBK,EAAeppB,GAT5C,IAUtBwpB,GAAiBD,GAAgBR,GAAmBM,EAAerpB,GAT7C,MAWpB,CACH+Q,WAAYqY,EAAc/hB,KAAK,KAC/BoiB,iBAAkBJ,EAAchiB,KAAK,OAKhCqiB,GAA0B,SAAC3Y,EAAYoY,EAAShoB,EAAM0c,EAAcF,GAC7E,IAAIqL,EAAoB,GAClBW,EAAkB,GAClBC,EAAe,GAyBrB,OAvBA9Y,EAAmBC,GAAY,SAAC/Q,GAC5B,GAAImpB,EAAQnpB,GAAI,CACZ,IAAIie,EAAO,GAEP4L,EAAe,CAAEzf,KAAM,IAE3ByT,EAAa3Q,SAAQ,SAACgR,GAClB,IAAM/O,EAAOwO,EAAcO,GAAG7I,aAAalG,KAAKnP,GAChDie,EAAUA,EAAV,IAAkB9O,EAClB0a,EAAazf,KAAK8T,GAAK/O,UAGG7K,IAA1BqlB,EAAgB1L,KAChB0L,EAAgB1L,GAAQ,GACxB+K,EAAkB/K,IAAS,EAC3B2L,EAAa3L,GAAQ4L,GAGzBd,GAAmBY,EAAgB1L,GAAOje,EAAGgpB,EAAkB/K,IAC/D+K,EAAkB/K,GAAQje,MAI3B,CACH2pB,kBACAC,iBAKKE,GAAe,SAACC,EAAUC,EAAUzY,EAAQiX,EAAUyB,GAC/D,IAAI9L,EAAc,GACdC,EAAgB,kBAAMoK,EAAS9S,gBAC3BvU,EAASoQ,EAATpQ,KACF4P,EAAagZ,EAAShV,YACtB9H,EAAS8c,EAASG,uBAAuBjd,OACzCkd,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAChD8U,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAShD,OAAO8a,EAASlZ,GAPS,SAAAzK,GAAA,OAAS0jB,EAC9B5C,GAAqBna,EAAQkd,EAAqBC,E
AAe9jB,GACjEA,EACA8X,EACAD,KAG0Chd,IAGrCkpB,GAAqB,SAACrC,GAC/B,IAAM+B,EAAW/B,EAAMsC,OAAM,GACvB9C,EAAoBQ,EAAMkC,uBAShC,OARAH,EAASjP,eAAiB0M,EAAkBva,OAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK,KAG3EmgB,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KACnCia,EAAS1L,wBAAwBkM,wBAE1BR,GAGLS,GAAS,SAACtU,EAAK/G,EAAMsb,GAGvB,IAFA,IAAIlpB,EAAMkpB,EAAGvU,EAAK/G,EAAM,GAEfnP,EAAI,EAAG+M,EAAMmJ,EAAInQ,OAAQ/F,EAAI+M,EAAK/M,IACvCuB,EAASA,EAAT,IAAgBkpB,EAAGvU,EAAK/G,EAAMnP,GAElC,OAAOuB,GAGEmpB,GAAyB,SAAC1C,EAAO2C,GAA4B,IAAhBpZ,EAAgB,uDAAP,GAC3DqZ,EAAM,GACJ3C,EAAY1W,EAAO0W,WAAaxU,EAChCoX,EAAkBtZ,EAAOsZ,kBAAmB,EAC5CC,EAAcT,GAAmBrC,GACjC+C,EAAoBD,EAAYE,kBAKlCJ,EAHCD,EAAW5kB,OAGN4kB,EAAWpe,KAAI,SAAA0e,GAAA,OACbC,SACEjS,GAF0B4D,EAqCjCoO,GAnC2BE,UACpBC,EAAevO,EAAUmO,kBACzB/N,EAAavc,OAAO0J,KAAKyS,EAAUrI,gBAAgBvE,gBACpDiJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBM,EAAOpO,EAAWlX,OAClBulB,EAAUrO,EAAW1Q,KAAI,SAAAjM,GAAA,OAC3B8qB,EAAa9qB,GAAGgG,SACd8W,EAAW1c,OAAO0J,KAAKyS,EAAUrI,gBAAgB5E,cAClDsJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBQ,EAAc1O,EAAUrI,gBAAgB/E,YACxCN,EAAO8J,EAAQ9J,KACfkR,EAASjD,EAASrF,QAAO,SAACC,EAAKwT,GAEjC,OADAxT,EAAIwT,GAAKD,EAAYC,GAAGnL,SACjBrI,IACR,IACGyT,EAAY,GAElBP,EAAQ,SAAChV,EAAKwD,EAAKzB,GAAX,OAAmByB,EAAIxD,EAAI+B,KAC/BoT,GACAlc,EAAKjC,SAAQ,SAACwM,GACV,IAAMnY,EAAMipB,GAAOc,EAAS5R,EAAKwR,GACjCO,EAAUlqB,GAAO,KAIzB2pB,EAAQ,SAAChV,EAAKjJ,EAAQgL,GAAd,OAAsBhL,EAAOiJ,EAAI+B,IAAMwG,eACxCtP,EAAKpJ,OAAS,SAACkH,GAClB,IAAMye,GAAUL,GAAOI,EAAUjB,GAAOvN,EAAYhQ,EAAQie,IAE5D,OAAIL,EACOzN,EAASuO,OAAM,SAAAhc,GAAA,OAAS1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,IACxE1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,OAAO+b,EAErDA,GACP,kBAAM,GApCqB,IAAC7O,EAC5BqO,EACEjS,EACAmS,EACAnO,EAEAoO,EACAC,EAEAlO,EAEAmO,EACApc,EACAkR,EAIAoL,KAnBJ,CAAC,kBAAM,IAqDjB,OAVIxD,IAAcxU,EACEqX,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIe,OAAM,SAAAlB,GAAA,OAAMA,EAAGxd,QAAU,CACtE4e,WAAW,IAGCf,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIkB,MAAK,SAAArB,GAAA,OAAMA,EAAGxd,QAAU,CACrE4e,WAAW,KA+CVE,GAAuB,SAAChC,EAAUhZ,EAAYyX,EAAUwD,EAAchC,GAC/ED,EA
AShV,YAAchE,EACvBgZ,EAAS1L,wBAAwBkM,wBACjC5B,GACIH,EACAuB,EACApX,EAAeC,OACd,CAAErB,OAAQya,GACThC,IA+BGiC,GAAmB,SAACzD,EAAU0D,EAAW3a,EAAQ4a,GAC1D,IAAMC,EAAS5D,EAAS8B,MAAM/Y,EAAOsa,WACjCQ,EAAgBH,EAiBpB,OAhBI3a,EAAOpQ,OAAS8B,EAAcE,UAC9BkpB,EAAgBF,EAAUjT,QAAO,SAAAd,GAAA,OAA+C,IAAlC8T,EAAU3hB,QAAQ6N,OAIpEgU,EAAOtR,eAAiBuR,EAAchlB,KAAK,KAC3C+kB,EAAO/N,wBAAwBkM,wBAE/B5B,GACIH,EACA4D,EACAzZ,EAAeE,QACf,CAAEqZ,YAAW3a,SAAQ+a,gBAAiBD,GACtC,MAGGD,GAIEG,GAAmB,SAAC/D,EAAUgE,EAAcjb,EAAQ4a,GAAjC,OAC5BK,EAAajgB,KAAI,SAAAkgB,GAAA,OACbR,GAAiBzD,EAAUiE,EAAYlb,EAAQ4a,OAE1CO,GAAqB,SAAC7G,GAO/B,IALAA,EAAahY,EAAQ,GAAIgY,IACT7V,OACZ6V,EAAW7V,KAAOlN,EAAUE,YAG3B6iB,EAAW1G,QACZ,OAAQ0G,EAAW7V,MACnB,KAAKlN,EAAUC,QACX8iB,EAAW1G,QAAUvc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX6iB,EAAW1G,QAAU3c,EAAiBC,YAK9C,OAAOojB,GA6BE8G,GAA4B,SAAA5c,GAAA,OAAUA,EAAOxD,KAAI,SAACsZ,GAG3D,OA7B8B,SAACA,GAC/B,IAAM+G,EAA2B,CAAChqB,EAAeC,YAC3CgqB,EAAuB,CACzBrqB,EAAiBC,YACjBD,EAAiBG,OACjBH,EAAiBE,SACjBF,EAAiBsqB,KAEb9c,EAAwB6V,EAAxB7V,KAAMmP,EAAkB0G,EAAlB1G,QAAS5e,EAASslB,EAATtlB,KAEvB,OAAQyP,GACR,KAAKlN,EAAUE,UACX,IAA+C,IAA3C6pB,EAAqBtiB,QAAQ4U,GAC7B,MAAM,IAAItK,MAAJ,qDAA+DsK,EAA/D,aAAmF5e,EAAnF,UAEV,MACJ,KAAKuC,EAAUC,QACX,IAAmD,IAA/C6pB,EAAyBriB,QAAQ4U,GACjC,MAAM,IAAItK,MAAJ,mDAA6DsK,EAA7D,aAAiF5e,EAAjF,UAEV,MACJ,QACI,MAAM,IAAIsU,MAAJ,wCAAkD7E,EAAlD,aAAmEzP,EAAnE,WAMVwsB,CADAlH,EAAa6G,GAAmB7G,IAEzBA,MAeEmH,GAAa,SAACC,EAAU9d,EAAMY,EAAQ9E,GAC/C8E,EAAS4c,GAA0B5c,GACnC9E,EAAUvK,OAAO8Q,OAAO9Q,OAAO8Q,OAAO,GAAI0b,IAAgBjiB,GAC1D,IAAMic,EAAYC,GAAetmB,IAAIoK,EAAQiW,YAG7C,IAAKgG,EACD,MAAM,IAAIrS,MAAJ,mCAA6C5J,EAAQiW,WAArD,WAPiD,MAU3BgG,EAAU/B,QAAQhW,EAAMY,EAAQ9E,GAVL,UAUpDsX,EAVoD,KAU5CjN,EAV4C,MAZ/B,SAACvF,EAAQod,GACrCpd,EAAO7C,SAAQ,SAAC2Y,GACZ,IAAMuH,EAAcvH,EAAWwH,GAC/B,GAAKD,EAAL,CAEA,IAAMnV,EAAMkV,EAAW5iB,QAAQsb,EAAWtlB,MAC1C4sB,EAAWlV,GAAOmV,EAClBvH,EAAWtlB,KAAO6sB,SACXvH,EAAWwH,OAetBC,CAAiBvd,EAAQwS,GACzB,IAAMhT,EAAW4S,GAAa7M,EAAevF,EAAQwS,GAG/CgL,EAAYle,EAAWC,gBAAgBC,EAAUtE,EAAQ1K,MAC/D0sB,EAASO,mBAAqBD,EAG9BN,EAASlY,YAAcO,EAA
cvP,QAAUuP,EAAc,GAAGvP,OAAzC,MAAuDuP,EAAc,GAAGvP,OAAS,GAAM,GAG9G,IAAM0nB,EAAe,GACbxgB,EAAWsgB,EAAXtgB,OACFmd,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAC1Cgb,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAQtD,OAPAxE,EAAmBmc,EAASlY,aAAa,SAAC/U,GACtCytB,EAAaztB,GAAKonB,GAAqBna,EAAQkd,EAAqBC,EAAepqB,MAEvFutB,EAAUG,oBAAsBD,EAEhCR,EAASnS,eAAkB/K,EAAOxD,KAAI,SAAA2R,GAAA,OAAKA,EAAE3d,QAAO8G,OACpD4lB,EAASU,YAAc1iB,EAAQiW,aAAe/e,EAAWI,KAAO2M,EAAiBC,GAAQlE,EAAQiW,WAC1F+L,GAGE3U,GAAgB,SAACvI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOhK,SAAU/F,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHA,KAAMoP,EACNK,KAAMD,EAAO/P,GAAGmf,SAAWpP,EAAO/P,GAAGgQ,KACrC1J,MAAOtG,GAInB,OAAO,MA+BL4tB,GAAgC,SAAC3C,EAAWpO,GAC9C,IAAMgR,EAAchR,EAAUiR,iBAC1BC,EAAiB9C,EAerB,OAbA4C,EAAY3gB,SAAQ,SAAC8gB,GACjB,GAAKA,EAAL,CADgC,IAMjB,EANiB,EAhCF,SAACA,GACnC,IAAIC,EAAS,GACThG,SAEJ,OADAA,EAAY+F,EAAW5F,IAEvB,KAAKzV,EAAeC,OAChBqb,EAAS,CAACD,EAAW1F,UACrB,MACJ,KAAK3V,EAAeE,QAChBob,EAAS,CAACD,EAAW3F,KAAKiE,iBAC1B,MACJ,KAAK3Z,EAAeO,KAChB+a,EAAS,CAACD,EAAW1F,UACrB,MACJ,KAAK3V,EAAeG,QAChBmV,EAAY,UACZgG,EAAS,CAACD,EAAW3F,KAAK6F,cAAcjd,MAAM,KAAM+c,EAAW1F,UAC/D,MACJ,QACIL,EAAY,KAGhB,MAAO,CACHA,YACAgG,UAa8BE,CAAuBH,GAA7C/F,EALwB,EAKxBA,UAAWgG,EALa,EAKbA,OACnB,GAAIhG,EACA8F,GAAiB,EAAAA,GAAe9F,GAAf,WAA6BgG,GAA7B,QAAqC,CAClDpC,WAAW,UAKhBkC,GAWLK,GAAuB,SAAvBA,EAAwBvR,EAAWoO,GAA8C,IAAnC1Z,EAAmC,uDAA1B,GAAI8c,EAAsB,uDAAP,GACtEC,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEpD,GAAI1R,IAAcyR,EAAlB,CAIA,IAAME,GAAYD,EAAcxoB,SAA+C,IAAtCwoB,EAAchkB,QAAQsS,GAE/D2R,GAAa3R,EAAU4R,kBAAkBxD,EAAW1Z,GAEpD,IAAMmd,EAAW7R,EAAU8R,UAC3BD,EAASxhB,SAAQ,SAAC0hB,GACd,IAAMb,EAAiBH,GAA8B3C,EAAW2D,GAChER,EAAqBQ,EAAOb,EAAgBxc,EAAQ8c,QAI/CQ,GAAsB,SAAC7G,GAChC,KAAOA,EAAM8G,SAAW9G,EAAMG,YAAY4G,MAAK,SAAAzuB,GAAA,OAAKA,EAAE8nB,KAAOzV,EAAeG,YACxEkV,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEgH,GAAmB,SAAChH,GAC7B,KAAOA,EAAM8G,SACT9G,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEiH,GAAqB,SAACjH,GAC/B,IADoD,IAAdkH,EAAc,uDAAP,GACtClH,EAAM8G,SACTI,EAAK1kB,KAAKwd,GACVA,EAAQA,EAAM8G,QAElB,OAA
OI,GAGEC,GAA2B,SAACC,EAAaC,EAAYC,EAAgB/d,GAC9E,IAAI+W,SACA2C,SACIsE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bpe,EAAOoe,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB7d,EAAOse,WAC/BD,EAAY,CAAC,CACTtH,SAAU,KAEdA,EAAW,OACR,OACCwH,EAAkBpvB,OAAOqvB,OAAOR,EAAqBS,iBAC/B,IAAtBR,IACAM,EAAkBA,EAAgB5W,QAAO,SAAA5Y,GAAA,OAAKA,EAAEiR,OAAOme,WAAaD,MAGxE,IAAMQ,EAAmBH,EAAgB5W,QAlB5B,SAACgX,GAEd,OADe3e,EAAO4C,UAAa,kBAAM,IAC3B+b,EAAO3e,MAgBqChF,KAAI,SAAA4jB,GAAA,OAAUA,EAAO5e,OAAO+W,YAEhFiG,EAAgB,GAEtB,IAA0B,IAAtBiB,EAA6B,CAC7B,IAAMY,EAAwB1vB,OAAOqvB,OAAOR,EAAqBS,gBAEjEI,EAAsBljB,SAAQ,SAACmjB,GAC3B,IAAMC,EAAaD,EAAU9e,QACI,IAA7B+e,EAAWC,eAA2BD,EAAWH,SAAW5e,EAAO4e,QAC/DG,EAAWZ,WAAaD,IAC5BlB,EAAc/jB,KAAK6lB,EAAUrI,QAC7BM,EAAW8H,EAAsBlX,QAAO,SAAA5Y,GAAA,OAAKA,IAAM+vB,KAAW9jB,KAAI,SAAAjM,GAAA,OAAKA,EAAEiR,OAAO+W,aACvEviB,QAAU6pB,EAAUplB,KAAK,CAC9B8d,WACAkI,OAAQH,EAAUrI,MAClBkH,KAAMD,GAAmBoB,EAAUrI,aAOnDM,GAAW,MAAGjD,OAAH,qBAAiB4K,GAAjB,CAAmCb,KAAclW,QAAO,SAAA5Y,GAAA,OAAW,OAANA,KACxEsvB,EAAUplB,KAAK,CACX8d,WACAiG,wBAAmBA,EAAnB,GAAqChd,EAAOgd,eAAiB,OAIrE,IAAMkC,EAAYpB,EAAWrH,MAEvB0I,EAAahwB,OAAO8Q,OAAO,CAC7Bmf,kBAAmBvB,EACnBK,uBACDle,GAEGqf,EAAmBvB,EAAWwB,aAChClB,GAA+BiB,IAC/B3F,EAAYP,GAAuBkG,EAAkBtI,EAAU,CAC3DuC,gBAAiB8E,IAErBvB,GAAqBwC,EAAkB3F,EAAWyF,IAGtDd,EAAU1iB,SAAQ,SAAC4jB,GACf,IAAMC,EAAmBrG,GAAuB+F,EAAWK,EAAIxI,UACzD4G,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EA3HO,SAAC/F,EAAWiE,GACjC,IAAK,IAAIlvB,EAAI,EAAG+M,EAAMmiB,EAAKnpB,OAAQ/F,EAAI+M,EAAK/M,IAAK,CAC7C,IAAMgoB,EAAQkH,EAAKlvB,GACnBirB,EAAY2C,GAA8B3C,EAAWjD,GAEzD,OAAOiD,EAsHuBgG,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIN,OAAO/B,kBAAkBuC,EAAeN,QAE5CtC,GAAqBqC,EAAWM,EAAkBL,EAAY,CAC1DnC,cAAeuC,EAAIvC,cACnBD,mBAAoBqB,GAA+BiB,QAMtDO,GAA4B,SAAC5B,EAAsBF,EAAYC,GACxE,IAAM8B,EAAmB7B,EAAqB6B,iBAE9C,IAAK,IAAMjB,KAAUiB,EAAkB,CACnC,IACMd,EADYc,EAAiBjB,GACN5e,OACvBke,EAAsBH,EAAe/d,OAAOme,SAC5C2B,GAAwB/B,EAAeoB,WAAWW,uBACpD/B,EAAeoB,WAAWW,sBAAsBf,EAAYhB,EAAe/d,QAC/E,GAAI+e,EAAWZ,WAAaD,GAAuB4B,EAAuB,CACtE,IAAMC,EAAgBhB,EAAWhI,SAC
jC6G,GAAyBmC,EAAejC,EAAY,CAChDE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXa,MAKFiB,GAAqB,SAAChC,GAA6C,IAAvBhe,EAAuB,uDAAd,GAAIyW,EAAU,aACxEwJ,SACEC,EAAkBlgB,EAAOkgB,gBACzBnJ,EAAW/W,EAAO+W,SAClB/mB,EAASgQ,EAAO4e,OAAhB,IAA0B5e,EAAOme,SAGnC8B,EADAC,EACkBlC,EAAqBS,eAErBT,EAAqB6B,iBAG1B,OAAb9I,SACOkJ,EAAgBjwB,GAEvBiwB,EAAgBjwB,GAAO,CACnBymB,QACAzW,WAQCmgB,GAAyB,SAACxF,EAAWC,EAAWwF,GACzD,IAAMC,EAAsB1F,EAAUnU,QAAO,SAACC,EAAKrI,GAM/C,MAL+B,WAA3BA,EAAMkiB,YAAYtxB,KAClByX,EAAIxN,KAAJ,MAAAwN,EAAA,GAAYmU,EAAUjT,QAAO,SAAAd,GAAA,OAA0C,IAA7BA,EAAU0Z,OAAOniB,QACpDA,KAASgiB,GAChB3Z,EAAIxN,KAAKmF,GAENqI,IACR,IACH,OAAO5K,MAAMC,KAAK,IAAI+S,IAAIwR,IAAsBrlB,KAAI,SAAAoD,GAAA,OAASA,EAAMuQ,WAU1D3P,GAAwB,SAACZ,EAAO1O,GACzC,OAAI0O,EAAMmQ,aACCnQ,EAAMmQ,cAANnQ,CAAqB1O,GAEzBA,G,0PC/II8wB,G,WA/hBX,c,4FAAwB,SACpB,IAAIC,SAEJ5tB,KAAK0qB,QAAU,KACf1qB,KAAK+jB,YAAc,GACnB/jB,KAAKskB,oBAAsB,GAC3BtkB,KAAKuqB,UAAY,GANG,2BAARV,EAAQ,qBAARA,EAAQ,gBAQE,IAAlBA,EAAOloB,SAAkBisB,EAAS/D,EAAO,cAAe8D,GAExD3tB,KAAK0W,eAAiBkX,EAAOlX,eAC7B1W,KAAK2Q,YAAcid,EAAOjd,YAC1B3Q,KAAKupB,YAAcqE,EAAOrE,YAC1BvpB,KAAK0qB,QAAUkD,EACf5tB,KAAKopB,mBAAqBppB,KAAK0qB,QAAQtB,mBACvCppB,KAAK6tB,gBAAkBvjB,IACvBtK,KAAKia,wBAAwBkM,0BAE7ByC,GAAUA,cAAC5oB,MAAX,OAAoB6pB,IACpB7pB,KAAK6tB,gBAAkB7tB,KAAKopB,mBAAmBjtB,KAC/C6D,KAAKia,wBAAwBkM,wBAC7BnmB,KAAK8tB,sBAAwB,CACzBlC,eAAgB,GAChBoB,iBAAkB,K,+CA0B1B,OAAOhtB,KAAKoQ,gBAAgBvH,OAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEyP,c,gCAY9C,OAAO3L,KAAK6tB,kB,sCAIZ,OAAO7tB,KAAK+tB,c,8CAMZ,OAFA/tB,KAAK+tB,YAAc5K,GAAa,CAACnjB,KAAK2Q,YAAa3Q,KAAK0W,gBACnD1W,KAAK8lB,uBAAwB9lB,KAAK6tB,iBAChC7tB,O,6CAIP,OAAOA,KAAKopB,qB,2BAiCV4E,EAAUje,GACZ,OAAOH,EAAa5P,KAAMguB,EAAUje,K,kCAuB3Bie,GACT,OAAOpe,EAAa5P,KAAMguB,EAAU9T,GAAkBla,KAAMguB,IAAW,K,4BAqBpEC,GACH,OAAO3T,GAAMta,KAAMiuB,K,iCAoBXC,GACR,OAAO7X,GAAWrW,KAAMkuB,K,6BAkDpBtI,EAAUzY,GACd,IAAMghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAMf,OAJAta,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,IAC/BpQ,KAAOoQ,EAAOpQ,MAAQoxB,EAAUpxB,KDkChB,SAACqnB,EAAUwB,EAAUgC,EAAcwG,GAC9D,IAAIC,EAAe,GAEbtxB,EAAS6qB,EAAT7qB,
KAEAirB,EAAS5D,EAAS8B,MAAMkI,EAAY3G,WACpC6G,EAAmB5I,GACrBsC,EACApC,EACAgC,EACAxD,EACAU,IAEEL,EAAYD,GAAcznB,GAAM0nB,UAItC,OAFAkD,GAAqBK,EAAQsG,EAAiB7J,EAAU,IAAKL,EAAUwD,EAAchC,GAEjFnB,EAAU9iB,OAAS,GACnB0sB,EAAejK,EAAS8B,MAAMkI,EAAY3G,WAC1CE,GAAqB0G,EAAcC,EAAiB7J,EAAU,IAAKL,EAAUwD,EAAchC,GACpF,CAACoC,EAAQqG,IAGbrG,ECtDIuG,CACHvuB,KACA4lB,EACAzY,EAJgB,CAAEsa,UAAWta,EAAOsa,c,gCA4BxC,OAAQznB,KAAK2Q,YAAYhP,SAAW3B,KAAK0W,eAAe/U,S,8BAUnC,IAAlB8lB,IAAkB,yDACf9B,EAAW,IAAI3lB,KAAKytB,YAAYztB,MAMtC,OALIynB,EACA9B,EAAS6I,UAAUxuB,MAEnB2lB,EAAS6I,UAAU,MAEhB7I,I,8BA8CFmC,EAAW3a,GAChB,IAAMghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAEfta,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,GACtC,IAAMogB,EAAcvtB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKunB,GACtBxwB,EAASoQ,EAATpQ,KACFywB,EAAsBF,GAAuBxF,EAAWC,EAAWwF,GAErE9U,SAEA1b,IAAS8B,EAAcG,IASvByZ,EAAY,CARUoP,GAAiB7nB,KAAMwtB,EAAqB,CAC9DzwB,KAAM8B,EAAcC,OACpB2oB,UAAWta,EAAOsa,WACnBM,GACkBF,GAAiB7nB,KAAMwtB,EAAqB,CAC7DzwB,KAAM8B,EAAcE,QACpB0oB,UAAWta,EAAOsa,WACnBM,IAIHtP,EADsBoP,GAAiB7nB,KAAMwtB,EAAqBrgB,EAAQ4a,GAI9E,OAAOtP,I,wCAIP,OAAOzY,KAAKyuB,e,8CAWZ,OAPAzuB,KAAKyuB,aAAezuB,KAAK+tB,YAAYllB,OAAO8K,QAAO,SAACC,EAAK8a,EAAU9yB,GAK/D,OAJAgY,EAAI8a,EAASvyB,QAAU,CACnB+F,MAAOtG,EACP+yB,IAAKD,EAAS/iB,UAEXiI,IACR,IACI5T,O,gCAWPA,KAAK0qB,SAAW1qB,KAAK0qB,QAAQkE,YAAY5uB,MACzCA,KAAK0qB,QAAU,KACf1qB,KAAKuqB,UAAUzhB,SAAQ,SAAC0hB,GACpBA,EAAME,QAAU,QAEpB1qB,KAAKuqB,UAAY,K,kCA6BRC,GACT,IAAI3W,EAAM7T,KAAKuqB,UAAUtV,WAAU,SAAA4Z,GAAA,OAAWA,IAAYrE,MACjD,IAAT3W,GAAa7T,KAAKuqB,UAAU/iB,OAAOqM,EAAK,K,gCAQjCib,GACP9uB,KAAK0qB,SAAW1qB,KAAK0qB,QAAQkE,YAAY5uB,MACzCA,KAAK0qB,QAAUoE,EACfA,GAAUA,EAAOvE,UAAUnkB,KAAKpG,Q,kCA4BhC,OAAOA,KAAK0qB,U,oCA6BZ,OAAO1qB,KAAKuqB,Y,uCA4BZ,OAAOvqB,KAAK+jB,c,+CA4BZ,OAAO/jB,KAAKskB,wB,uwBCwSL1mB,G,YAtxBX,aAAsB,O,4FAAA,oCAANsJ,EAAM,qBAANA,EAAM,sB,iKAAA,2EACTA,KADS,OAGlB,EAAK6nB,eAAiB,GAHJ,E,0WAgFbloB,GAQLA,EAAUvK,OAAO8Q,OAAO,GAPL,CACf4hB,MAAO,MACP3sB,UAAW,KACX4sB,SAAS,EACTC,cAAc,EACdld,KAAM,IAE8BnL,GACxC,IAAMgC,EAAS7I,KAAK8lB,uBAAuBjd,OAErCsmB,EAAgBxZ,G
AAY5Z,KAC9BiE,KACAA,KAAK8lB,uBAAuBjd,OAC5B7I,KAAK2Q,YACL9J,EAAQqoB,aAAermB,EAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEC,UAAQ8G,OAASjD,KAAK0W,eAC/D7P,EAAQmL,KACR,CACI8D,WAA8B,WAAlBjP,EAAQmoB,MACpBnZ,SAAUhP,EAAQooB,UAI1B,IAAKpoB,EAAQxE,UACT,OAAO8sB,EAxBG,IA2BN9sB,EAAcwE,EAAdxE,UACA0I,EAAuBokB,EAAvBpkB,KAAMY,EAAiBwjB,EAAjBxjB,OAAQ8J,EAAS0Z,EAAT1Z,KAChB2Z,EAAazjB,EAAOxD,KAAK,SAAA9E,GAAA,OAAKA,EAAElH,QAEhCkzB,EADgB/yB,OAAO0J,KAAK3D,GACAsR,QAAO,SAACC,EAAK5F,GAC3C,IAAM6F,EAAMub,EAAWjpB,QAAQ6H,GAI/B,OAHa,IAAT6F,GACAD,EAAIxN,KAAK,CAACyN,EAAKxR,EAAU2L,KAEtB4F,IACR,IAgCH,MA9BsB,WAAlB/M,EAAQmoB,MACRK,EAAYvmB,SAAQ,SAACwmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBvkB,EAAKwkB,GAAMzmB,SAAQ,SAACoK,EAAOuc,GACvB1kB,EAAKwkB,GAAME,GAAYD,EAAMzzB,UACzBmE,EACAgT,EACAuC,EAAKga,GACL9jB,EAAO4jB,UAKnBxkB,EAAKjC,SAAQ,SAACoK,EAAOuc,GACjBJ,EAAYvmB,SAAQ,SAACwmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBpc,EAAMqc,GAAQC,EAAMzzB,UAChBmE,EACAgT,EAAMqc,GACN9Z,EAAKga,GACL9jB,EAAO4jB,UAMhBJ,I,gCASP,IAAMxiB,EAAa3M,KAAK2Q,YAClB+e,EAAM,GAER/iB,EAAWhL,QACMgL,EAAWE,MAAM,KAEzB/D,SAAQ,SAACuK,GAAQ,MACHA,EAAIxG,MAAM,KAAK1E,IAAIO,QADhB,UACjBsE,EADiB,KACVC,EADU,KAGtBA,OAAc/M,IAAR+M,EAAoBA,EAAMD,EAChC0iB,EAAItpB,KAAJ,MAAAspB,EAAA,GAAY1mB,MAAMiE,EAAMD,EAAQ,GAAG2iB,OAAOxnB,KAAI,SAAC2R,EAAGjG,GAAJ,OAAY7G,EAAQ6G,UAI1E,OAAO6b,I,8BA0BFE,GAAwD,IAA7ClX,EAA6C,uDAAlC,GAAIvL,EAA8B,uDAArB,CAAEsa,WAAW,GAC/CqC,EAAgBA,GAAG8F,EAAU3sB,OAC/B4mB,EAAS,CAAC7pB,KAAM4vB,EAAWlX,GACzBiB,EAAenB,gBAAWqR,GAgBhC,OAdAtF,GACIvkB,KACA2Z,EACApL,EAAeG,QACf,CAAEkhB,YAAW9F,gBAAe7Q,eAAgBV,GAAaU,kBACzDP,GAGAvL,EAAOsa,UACP9N,EAAa6U,UAAUxuB,MAEvB2Z,EAAa6U,UAAU,MAGpB7U,I,2BAsDL5F,GAA+C,IAA/B5G,EAA+B,uDAAtB,CAAEsa,WAAW,GAClCxE,EAAUjjB,KAAK+mB,QAAQ,CACzBiI,MAAO,MACPhd,KAAM+B,IAEJoK,EAAS8E,EAAQtX,OAAOxD,KAAI,SAAAoD,GAAA,OAASA,EAAMpP,QAC3C0zB,EAAe,CAAC1R,GAAQ8C,OAAOgC,EAAQlY,MAEvC+kB,EAAW,IAAI9vB,KAAKytB,YAAYoC,EAAc5M,EAAQtX,OAAQ,CAAEmR,WAAY,WAgBlF,OAdAyH,GACIvkB,KACA8vB,EACAvhB,EAAeO,KACf3B,EACA4G,GAGA5G,EAAOsa,UACPqI,EAAStB,UAAUxuB,MAEnB8vB,EAAStB,UAAU,MAGhBsB,
I,gCAwBAlkB,EAAM/E,GACb+E,EAAOA,GAAQ5L,KAAKupB,YACpB1iB,EAAUvK,OAAO8Q,OAAO,GAAI,CAAE6U,eAAgB,KAAOpb,GAErD,IAAMgC,EAAS7I,KAAKoQ,gBAAgBvH,OAC9BknB,EAAUlnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAEnD,mBAC5B8e,EAAYD,EAAQ,GAAGpuB,OACzBsuB,SACAC,SACAC,SAEJ,GAAIvkB,IAAS7N,EAAWC,UAEpB,IADAiyB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIzM,EAAOsnB,GAAQh0B,QAAU4zB,EAAQI,GAAQD,GAEjDD,EAAe7pB,KAAKkP,QAErB,GAAI1J,IAAS7N,EAAWE,QAAS,CAEpC,IADAgyB,EAAiB,CAACpnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK4D,EAAQob,iBACpDiO,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIlP,KAAK2pB,EAAQI,GAAQD,IAE7BD,EAAe7pB,KAAKkP,EAAIrS,KAAK4D,EAAQob,iBAEzCgO,EAAiBA,EAAehtB,KAAK,UAClC,IAAI2I,IAAS7N,EAAWG,QAU3B,MAAM,IAAIuS,MAAJ,aAAuB7E,EAAvB,qBARN,IADAqkB,EAAiB,CAACpnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,WAC/B+zB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIlP,KAAK2pB,EAAQI,GAAQD,IAE7BD,EAAe7pB,KAAKkP,IAM5B,OAAO2a,I,+BAGD1kB,GACN,IAAMyI,EAAYzI,EAAMpP,OACxB6D,KAAK0W,gBAAL,IAA2B1C,EAC3B,IAAMoP,EAAoBpjB,KAAKopB,mBACzBgH,EAAqBhN,EAAkBkG,oBACvCpY,EAAgB3F,EAAM2F,gBACtB+R,EAAU1X,EAAM0F,aAAalG,KAEnC,GAAKqY,EAAkB/X,YAAYE,EAAMpP,QAKlC,CACH,IAAM4M,EAAaqa,EAAkBva,OAAOoM,WAAU,SAAAob,GAAA,OAAaA,EAAUl0B,SAAW6X,KACxFjL,GAAc,IAAMqa,EAAkBva,OAAOE,GAAcwC,QAN3D6X,EAAkBva,OAAOzC,KAAKmF,GAC9B6kB,EAAmBtnB,SAAQ,SAACV,EAAKxM,GAC7BwM,EAAImD,EAAMpP,QAAU,IAAI6P,EAAMkF,EAActV,GAAIqnB,EAAQrnB,GAAI2P,MAapE,OALA6X,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KAEnC1L,KAAKia,wBAAwBkM,wBACtBnmB,O,wCAuCQ2L,EAAQ2kB,EAAYnjB,GAAQ,WAC3CxB,EAAS2c,GAAmB3c,GAC5BwB,EAAS7Q,OAAO8Q,OAAO,GAAI,CAAEqa,WAAW,EAAM8I,YAAY,GAASpjB,GAEnE,IAAM6Z,EAAehnB,KAAK4mB,kBACpB4J,EAAUF,EAAWlc,MAAM,EAAGkc,EAAW3uB,OAAS,GAClD8uB,EAAaH,EAAWA,EAAW3uB,OAAS,GAElD,GAAIqlB,EAAarb,EAAOxP,QAAUgR,EAAOojB,WACrC,MAAM,IAAI9f,MAAS9E,EAAOxP,KAApB,sCAGV,IAAMu0B,EAAkBF,EAAQroB,KAAI,SAACoD,
GACjC,IAAMolB,EAAY3J,EAAazb,GAC/B,IAAKolB,EAED,MAAM,IAAIlgB,MAASlF,EAAb,gCAEV,OAAOolB,EAAUzuB,SAGfgkB,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAE1BmJ,EAAK1K,EAAM9V,gBAAgBvH,OAC3BgoB,EAAiBH,EAAgBvoB,KAAI,SAAA0L,GAAA,OAAO+c,EAAG/c,MAEjDkG,EAAc,GACdC,EAAgB,kBAAM,EAAK1I,gBAEzBwf,EAAiB,GACvBpkB,EAAmBwZ,EAAMvV,aAAa,SAAC/U,GACnC,IAAMm1B,EAAaF,EAAe1oB,KAAI,SAAAoD,GAAA,OAASA,EAAM0F,aAAalG,KAAKnP,MACvEk1B,EAAel1B,GAAK60B,kBAAcM,GAAd,QAA0Bn1B,EAAGoe,EAAeD,QAhCzB,MAkC3BgE,GAAa,CAAC+S,GAAiB,CAACnlB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC,WA6C3C,OAVA2a,EAAM8K,SAASzlB,GAEfgZ,GACIvkB,KACAkmB,EACA3X,EAAeK,QACf,CAAEzB,OAAQxB,EAAQ9C,OAAQ2nB,GAC1BC,GAGGvK,I,gCAWA8E,GAA2D,IAA9C7d,EAA8C,uDAArC,GAAI8jB,EAAiC,aAAjB3E,EAAiB,uDAAJ,GACxDe,EAAkBlgB,EAAOkgB,gBACzBhC,EAAsBle,EAAOme,SAC7B4F,EAAU/jB,EAAO+jB,QACjB7E,EAAYzB,GAAiB5qB,MAC7BmrB,EAAuBkB,EAAUyB,sBACjCtB,EAAmB/B,GAAoBzqB,MACvCirB,EAAa,CACfwB,aAAcD,EACd5I,MAAOyI,GAgBX,OAbA4E,GAAkB9D,GAAmBhC,EAAsBhe,EAAQnN,MACnE+qB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChF/uB,OAAO8Q,OAAO,CACV8jB,WACD/jB,IAEHkgB,GACAN,GAA0B5B,EAAsBF,EAAY,CACxD9d,SACAmf,eAIDtsB,O,yBAUPmxB,EAAWvkB,GACX,OAAQukB,GACR,I5CplBmB,c4CqlBfnxB,KAAK+uB,eAAe3oB,KAAKwG,GAG7B,OAAO5M,O,kCASEmxB,GACT,OAAQA,GACR,I5CnmBmB,c4ComBfnxB,KAAK+uB,eAAiB,GAI1B,OAAO/uB,O,wCAUQ6mB,EAAWqK,GAAS,WACflxB,KAAK+uB,eACXjmB,SAAQ,SAAAud,GAAA,OAAMA,EAAGtqB,KAAK,EAAM8qB,EAAWqK,Q,0BA8CpDE,EAAkBjkB,GACnB,IAAM6Z,EAAehnB,KAAK4mB,kBAE1B,IAAKI,EAAaoK,GACd,MAAM,IAAI3gB,MAAJ,SAAmB2gB,EAAnB,kBAGV,IAAMC,EAAelkB,EAAOhR,MAAWi1B,EAAlB,UAErB,GAAIpK,EAAaqK,GACb,MAAM,IAAI5gB,MAAJ,SAAmB4gB,EAAnB,mBAGV,IAb2B,E7CvnB5B,SAAgCC,EAAc3kB,EAAYQ,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAASwjB,EAAmCpkB,EAAnCokB,UAAWzjB,EAAwBX,EAAxBW,QAASd,EAAeG,EAAfH,MAAOC,EAAQE,EAARF,IAD2B,EAEhDqkB,EAAarV,SAFmC,SAE9DuV,EAF8D,KAExDC,EAFwD,KAIhE1jB,IACDf,EAAmB,IAAVA,KAAiBA,GAASA,EAAQwkB,GAASA,EAAOxkB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMwkB,GAAUA,EAAO,EAAKxkB,EAErDskB,IACAzjB,EAAU9J,KAAK0tB,KAAK1tB,KAAK2tB,IAAI1kB,EAAMD,GAASukB,IAGhDxjB,EAAUF,EAAgBC,EAASd,EAAOC,IAG1Cc,EAAQ,GAAKyjB,GACbzjB,
EAAQpG,QAAQ6pB,GAEhBzjB,EAAQA,EAAQpM,OAAS,IAAM8vB,GAC/B1jB,EAAQ3H,KAAKqrB,EAAO,GAIxB,IADA,IAAMvjB,EAAe,GACZtS,EAAI,EAAGA,EAAImS,EAAQpM,OAAS,EAAG/F,IACpCsS,EAAa9H,KAAK,CACd4G,MAAOe,EAAQnS,GACfqR,IAAKc,EAAQnS,EAAI,KAIzB,IAAMg2B,EAAa,GAYnB,OAXAllB,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMsX,EAAQoe,EAAargB,aAAalG,KAAKnP,GAC7C,GAAIsX,aAAiBhG,EACjB0kB,EAAWxrB,KAAK8M,OADpB,CAKA,IAAM1R,EAAQyM,EAAgBC,EAAcgF,GAC5C0e,EAAWxrB,KAAQ5E,EAAMwL,MAAzB,IAAkCxL,EAAMyL,SAGrC,CAAE2kB,aAAYvU,KAAMtP,G6C2lBM8jB,CADR7xB,KAAKoQ,gBAAgB/E,YAAY+lB,GACWpxB,KAAK2Q,YAAaxD,GAA3EykB,EAdmB,EAcnBA,WAAYvU,EAdO,EAcPA,KAEdyU,EAAW/T,GAAa,CAAC6T,GAAa,CACxC,CACIz1B,KAAMk1B,EACNzlB,KAAMlN,EAAUE,UAChBmc,QAAS3c,EAAiBG,OAC1B8e,SACA,CAACgU,IAAe,GAElBnL,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAWhC,OAVAvB,EAAM8K,SAASc,GAEfvN,GACIvkB,KACAkmB,EACA3X,EAAeM,IACd,CAAEuiB,mBAAkBjkB,SAAQkkB,gBAC5B,MAGEnL,I,qCA8BP,OAAO,IAAItoB,EAHEoC,KAAK+xB,UAAUh0B,EAAWC,WACxBgC,KAAKgyB,e,iCA+CZvY,EAAcL,EAAWjM,GACjC,IAAM6Z,EAAehnB,KAAK4mB,kBAE1BnN,EAAa3Q,SAAQ,SAACkL,GAClB,IAAKgT,EAAahT,GACd,MAAM,IAAIvD,MAAJ,SAAmBuD,EAAnB,mCAId,IAAMma,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAKf,OF5iBuB,SAACrD,EAAU3K,GAAiD,IAAnCL,EAAmC,uDAAvB,SAAAjY,GAAA,OAAOA,GAAKgM,EAAW,aAEnFsa,EACAta,EADAsa,UAEElO,EAAgB6K,EAAShU,gBAAgB/E,YAJwC,EASnFqa,GACAtB,EAAS8B,MAAMuB,GACfrO,EACAjM,EACAiX,GACA,sCAAIyF,EAAJ,qBAAIA,EAAJ,uBAAevE,GAAuBA,aAAIuE,EAA3B,QAAmCpQ,EAAcF,QAPhEgM,EAPmF,EAOnFA,gBACAC,EARmF,EAQnFA,aASEyM,EAAY,GAoBlB,OAnBA31B,OAAO0J,KAAKuf,GAAiBvT,OAAOlJ,SAAQ,SAACzF,GACzC,GAAIkiB,EAAgBliB,GAAI,CACpB,IAAM2kB,EAAS5D,EAAS8B,MAAMuB,GACxBmC,EAAapE,EAAaniB,GAChC2kB,EAAOrX,YAAc4U,EAAgBliB,GAAGJ,KAAK,KAC7C+kB,EAAO/N,wBAAwBkM,wBAI3BsB,GACAlD,GAAmBH,EAAU4D,EAAQzZ,EAAeC,OAAQrB,GAHtC,SAAAtE,GAAA,OAAU4Q,EAAa8N,OAAM,SAAAzN,GAAA,OAAKjR,EAAOiR,GAAGO,gBAAkBuP,EAAW5jB,KAAK8T,SAKxGkO,EAAOjE,YAAYiE,EAAOjE,YAAYpiB,OAAS,GAAGsiB,KAAOuB,EAAaniB,GAEtE4uB,EAAU7rB,KAAK4hB,OAKhBiK,EEugBIC,CAAgBlyB,KAAMyZ,EAAcL,EAF3CjM,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,M,sCAyCmB,IAA9CglB,EAA8C,uDAA/B,GAAIC,EAA2B,uDAAZ,GAAIjlB,EAA
Q,aACnDghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAET8F,EAAcvtB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKunB,GACxB8E,EAA0B,CAAC,CAACD,IAalC,OAXAjlB,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,IACtCglB,EAAeA,EAAaxwB,OAASwwB,EAAe,CAAC,KAGxCrpB,SAAQ,SAACwpB,EAAU12B,GAC5By2B,EAAwBz2B,GAAK0xB,GAAuBA,GAADA,UAC3CgF,GADqB,GACRF,IACjBrK,EACAwF,MAGDpF,GAAiBnoB,KAAMqyB,EAAyBllB,EAAQ4a,M,kDApuBhC5a,GAC/B,OAAOD,EAAkBI,iBAAiBH,K,+BA7B1C,OAAOoL,K,iCAOP,OAAOwK,K,iCAOP,OAAOjF,O,GAnES6P,ICxCT1W,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKK,GAA0CF,GAA1CE,IAAKC,GAAqCH,GAArCG,IAAK6a,GAAgChb,GAAhCgb,MAAOC,GAAyBjb,GAAzBib,KAAMC,GAAmBlb,GAAnBkb,MAAYC,GAAOnb,GAAZob,ICwBjDC,GAAY,CACdC,QC4LmB,sCAAIC,EAAJ,qBAAIA,EAAJ,uBACnB,SAAClc,GAAqC,IAAjCzJ,EAAiC,uDAAxB,CAAEsa,WAAW,GACnBsL,EAAYnc,EACZoc,SACEvJ,EAAc,GA8BpB,OA5BAqJ,EAAWhqB,SAAQ,SAAC+a,GAChBkP,EAAYlP,EAAUkP,GACtBtJ,EAAYrjB,KAAZ,MAAAqjB,EAAA,EAAoBsJ,EAAUhP,cACzBiP,IACDA,EAAaD,MAIjBC,GAAcA,IAAeD,GAC7BC,EAAWC,UAIfF,EAAUzO,oBAAsB,GAChCC,GACI3N,EACAmc,EACAxkB,EAAeI,QACf,KACA8a,GAGAtc,EAAOsa,UACPsL,EAAUvE,UAAU5X,GAEpBmc,EAAUvE,UAAU,MAGjBuE,ID7NXG,IC0He,sCAAIhsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGsc,IAAH,MAAAtc,EAAU1P,KDzH5CsgB,OC8BkB,sCAAItgB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4Q,OAAH,MAAA5Q,EAAa1P,KD7BlDisB,QC6DmB,sCAAIjsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGuc,QAAH,MAAAvc,EAAc1P,KD5DpDsR,QCoJmB,sCAAItR,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4B,QAAH,MAAA5B,EAAc1P,KDnJpDksB,kBEzB6B,sCAAIlsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGwc,kBAAH,MAAAxc,EAAwB1P,KF0BxE8K,KEjBgB,sCAAI9K,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG5E,KAAH,MAAA4E,EAAW1P,KFkB9C0I,eACAyG,cACAgd,YGjCG,SAAsB7Y,EAAYC,GACrC,OAAO7K,EAAa4K,EAAYC,EAAYP,GAAkBM,EAAYC,IAAa,IHiCvFF,iBACAG,kBACA4Y,clC1BG,SAAwB9Y,EAAYC,EAAY1K,GACnD,OAAOuK,GAAMC,GAAcC,EAAYC,EAAY1K,GAAW2K,GAAeF,EAAYC,EAAY1K,KkC0BrGuK,UAGEiZ,G,KAAcA,QACpBj3B,OAAO8Q,OAAOxP,GAAW,CACrBg1B,aACAY,QACAjlB,iBACAzO,oBACA/B,aACAc,gBACAqO,oBACAqmB,WACAjV,iBACAmV,iBACDC,GAEY91B","file":"datamodel.js","sourcesContent
":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all 
properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a 
number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format : this.schema.format }));\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse 
the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field'\nimport FieldParser from '../parsers/field-parser';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser(){\n throw new Error(\"Not yet implemented\")\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns 
the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER(){\n const builder = {\n _params : {},\n _context : this,\n fieldName : function(name) {\n this._params.name = name;\n return this;\n },\n schema : function(schema){\n this._params.schema = schema;\n return this;\n },\n data : function(data){\n this._params.data = data;\n return this;\n },\n partialField : function(partialField){\n this._params.partialField = partialField\n return this;\n },\n rowDiffset : function(rowDiffset){\n this._params.rowDiffset = rowDiffset\n return this;\n },\n build : function(){\n let partialField = null;\n if(this._params.partialField instanceof PartialField){\n 
partialField = this._params.partialField\n }else if(this._params.schema && this._params.data ){\n partialField = new PartialField(this._params.name, this._params.data, this._params.schema, this._context.parser())\n }\n else {\n throw new Error(\"Invalid Field parameters\")\n }\n return new this._context(partialField,this._params.rowDiffset);\n }\n }\n return builder;\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser'\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * 
@public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser(){\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format } ) {\n let result;\n // check if invalid date value\n if(!this._dtf){\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser'\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser(){\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser'\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser(){\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser'\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser(){\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype ,MeasureSubtype} from '../enums'\n\n\nclass FieldTypeRegistry{\n constructor(){\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype,dimension){\n this._fieldType.set(subtype,dimension);\n return this;\n }\n\n has(type){\n return this._fieldType.has(type);\n }\n\n get(type){\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL,Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL,Temporal)\n .registerFieldType(DimensionSubtype.BINNED,Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS,Continuous)\n}\n\nconst fieldRegistry = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n }\n return store;\n }\n return getStore();\n}());\n\nexport default fieldRegistry;\n\n\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\nimport { fieldRegistry } from './fields'\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\n// function createUnitField(data, schema) {\n// data = data || [];\n// let partialField;\n\n// switch (schema.type) {\n// case FieldType.MEASURE:\n// switch (schema.subtype) {\n// case MeasureSubtype.CONTINUOUS:\n// partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n// return new Continuous(partialField, `0-${data.length - 1}`);\n// default:\n// partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n// return new Continuous(partialField, `0-${data.length - 1}`);\n// }\n// case FieldType.DIMENSION:\n// switch (schema.subtype) {\n// case DimensionSubtype.CATEGORICAL:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// case DimensionSubtype.TEMPORAL:\n// partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n// return new 
Temporal(partialField, `0-${data.length - 1}`);\n// case DimensionSubtype.BINNED:\n// partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n// return new Binned(partialField, `0-${data.length - 1}`);\n// default:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// }\n// default:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// }\n// }\n\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if(fieldRegistry.has(schema.subtype)){\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build()\n } else {\n return fieldRegistry.get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build()\n \n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if(fieldRegistry.has(schema.subtype)){\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build()\n } else {\n return fieldRegistry.get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build()\n \n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter{\n constructor(type){\n this._type = type;\n }\n\n get type(){\n return this._type;\n }\n\n convert(data,schema,options){\n throw new Error(\"Convert method not implemented.\")\n }\n\n}","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length 
< width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from \"../model/dataConverter\";\nimport DSVStr from \"../utils/dsv-str\";\nimport DataFormat from 
'../../enums/data-format'\n\nexport default class DSVStringConverter extends DataConverter{\n constructor(){\n super(DataFormat.DSV_STR)\n }\n\n convert(data , schema , options){\n return DSVStr(data,schema,options);\n }\n} ","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from \"../model/dataConverter\";\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format'\n\nexport default class JSONConverter extends DataConverter{\n constructor(){\n super(DataFormat.FLAT_JSON)\n }\n\n convert(data , schema , options){\n return FlatJSON(data,schema,options);\n }\n} ","import DataConverter from \"../model/dataConverter\";\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format'\n\nexport default class DSVArrayConverter extends DataConverter{\n 
constructor(){\n super(DataFormat.DSV_ARR);\n }\n\n convert(data , schema , options){\n return DSVArr(data,schema,options);\n }\n} ","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from \"../model/dataConverter\";\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format'\n\nexport default class AutoDataConverter extends DataConverter{\n constructor(){\n super(DataFormat.AUTO)\n }\n\n convert(data , schema , options){\n return AUTO(data,schema,options);\n }\n} ","import DataConverter from './model/dataConverter'\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter'\n\nclass DataConverterStore {\n constructor(){\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters(){\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ]\n }\n\n /**\n * \n * @param {Array} converters : contains array of converter instance\n * @return { Map } \n */\n converters(converters){\n if(converters.length){\n 
converters.forEach(converter => this.store.set(converter.type,converter));\n }\n return this.store;\n }\n\n /**\n * \n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter){\n if(converter instanceof DataConverter){\n this.store.set(converter.type,converter)\n }\n return this;\n }\n\n /**\n * \n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter){\n this.store.delete(converter.type)\n return this;\n }\n\n get(name){\n if(this.store.has(name)){\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new DataConverterStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default converterStore;","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n 
partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n \n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, 
options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === 
false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, 
union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * 
{ name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. 
It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter'\nimport { fieldRegistry } from './fields'\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters(){\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes(){\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter'\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n 
version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js index 1cde90a..81cf09e 100644 --- a/src/converter/dataConverterStore.js +++ b/src/converter/dataConverterStore.js @@ -1,61 +1,61 @@ -import DataConverter from './model/dataConverter' +import DataConverter from './model/dataConverter'; import DSVStringConverter from './defaultConverters/dsvStringConverter'; import JSONConverter from './defaultConverters/jsonConverter'; import DSVArrayConverter from './defaultConverters/dsvArrayConverter'; -import AutoDataConverter from './defaultConverters/autoCoverter' +import AutoDataConverter from './defaultConverters/autoCoverter'; class DataConverterStore { - constructor(){ + constructor() { this.store = new Map(); this.converters(this._getDefaultConverters()); } - _getDefaultConverters(){ + _getDefaultConverters() { return [ new DSVStringConverter(), new DSVArrayConverter(), new JSONConverter(), new AutoDataConverter() - ] + ]; } /** - * + * * @param {Array} converters : contains array of converter instance - * @return { Map } + * @return { Map } */ - converters(converters){ - if(converters.length){ - converters.forEach(converter => this.store.set(converter.type,converter)); + converters(converters) { + if (converters.length) { + converters.forEach(converter => this.store.set(converter.type, converter)); } return this.store; } /** - * + * * @param {DataConverter} converter : converter Instance * @returns self */ - register(converter){ - if(converter instanceof 
DataConverter){ - this.store.set(converter.type,converter) + register(converter) { + if (converter instanceof DataConverter) { + this.store.set(converter.type, converter); } return this; } /** - * + * * @param {DataConverter} converter : converter Instance * @returns self */ - unregister(converter){ - this.store.delete(converter.type) + unregister(converter) { + this.store.delete(converter.type); return this; } - get(name){ - if(this.store.has(name)){ + get(name) { + if (this.store.has(name)) { return this.store.get(name); } return null; @@ -75,4 +75,4 @@ const converterStore = (function () { return getStore(); }()); -export default converterStore; \ No newline at end of file +export default converterStore; diff --git a/src/converter/defaultConverters/autoCoverter.js b/src/converter/defaultConverters/autoCoverter.js index c93f05c..1777377 100644 --- a/src/converter/defaultConverters/autoCoverter.js +++ b/src/converter/defaultConverters/autoCoverter.js @@ -1,13 +1,13 @@ -import DataConverter from "../model/dataConverter"; +import DataConverter from '../model/dataConverter'; import AUTO from '../utils/auto-resolver'; -import DataFormat from '../../enums/data-format' +import DataFormat from '../../enums/data-format'; -export default class AutoDataConverter extends DataConverter{ - constructor(){ - super(DataFormat.AUTO) +export default class AutoDataConverter extends DataConverter { + constructor() { + super(DataFormat.AUTO); } - convert(data , schema , options){ - return AUTO(data,schema,options); + convert(data, schema, options) { + return AUTO(data, schema, options); } -} \ No newline at end of file +} diff --git a/src/converter/defaultConverters/dsvArrayConverter.js b/src/converter/defaultConverters/dsvArrayConverter.js index 9526fb2..9dd2db1 100644 --- a/src/converter/defaultConverters/dsvArrayConverter.js +++ b/src/converter/defaultConverters/dsvArrayConverter.js @@ -1,13 +1,13 @@ -import DataConverter from "../model/dataConverter"; +import DataConverter from 
'../model/dataConverter'; import DSVArr from '../utils/dsv-arr'; -import DataFormat from '../../enums/data-format' +import DataFormat from '../../enums/data-format'; -export default class DSVArrayConverter extends DataConverter{ - constructor(){ +export default class DSVArrayConverter extends DataConverter { + constructor() { super(DataFormat.DSV_ARR); } - convert(data , schema , options){ - return DSVArr(data,schema,options); + convert(data, schema, options) { + return DSVArr(data, schema, options); } -} \ No newline at end of file +} diff --git a/src/converter/defaultConverters/dsvStringConverter.js b/src/converter/defaultConverters/dsvStringConverter.js index b504b8b..fa052a2 100644 --- a/src/converter/defaultConverters/dsvStringConverter.js +++ b/src/converter/defaultConverters/dsvStringConverter.js @@ -1,13 +1,13 @@ -import DataConverter from "../model/dataConverter"; -import DSVStr from "../utils/dsv-str"; -import DataFormat from '../../enums/data-format' +import DataConverter from '../model/dataConverter'; +import DSVStr from '../utils/dsv-str'; +import DataFormat from '../../enums/data-format'; -export default class DSVStringConverter extends DataConverter{ - constructor(){ - super(DataFormat.DSV_STR) +export default class DSVStringConverter extends DataConverter { + constructor() { + super(DataFormat.DSV_STR); } - convert(data , schema , options){ - return DSVStr(data,schema,options); + convert(data, schema, options) { + return DSVStr(data, schema, options); } -} \ No newline at end of file +} diff --git a/src/converter/defaultConverters/jsonConverter.js b/src/converter/defaultConverters/jsonConverter.js index 8fc9b90..e4b1d2e 100644 --- a/src/converter/defaultConverters/jsonConverter.js +++ b/src/converter/defaultConverters/jsonConverter.js @@ -1,13 +1,13 @@ -import DataConverter from "../model/dataConverter"; +import DataConverter from '../model/dataConverter'; import FlatJSON from '../utils/flat-json'; -import DataFormat from '../../enums/data-format' 
+import DataFormat from '../../enums/data-format'; -export default class JSONConverter extends DataConverter{ - constructor(){ - super(DataFormat.FLAT_JSON) +export default class JSONConverter extends DataConverter { + constructor() { + super(DataFormat.FLAT_JSON); } - convert(data , schema , options){ - return FlatJSON(data,schema,options); + convert(data, schema, options) { + return FlatJSON(data, schema, options); } -} \ No newline at end of file +} diff --git a/src/converter/index.js b/src/converter/index.js index 0d7f8fc..135b30a 100644 --- a/src/converter/index.js +++ b/src/converter/index.js @@ -1,4 +1,4 @@ import converterStore from './dataConverterStore'; import DataConverter from './model/dataConverter'; -export { DataConverter, converterStore } ; +export { DataConverter, converterStore }; diff --git a/src/converter/model/dataConverter.js b/src/converter/model/dataConverter.js index fe055a2..0aa9385 100644 --- a/src/converter/model/dataConverter.js +++ b/src/converter/model/dataConverter.js @@ -1,17 +1,17 @@ /** * Interface for all data converters */ -export default class DataConverter{ - constructor(type){ +export default class DataConverter { + constructor(type) { this._type = type; } - get type(){ + get type() { return this._type; } - convert(data,schema,options){ - throw new Error("Convert method not implemented.") + convert() { + throw new Error('Convert method not implemented.'); } -} \ No newline at end of file +} diff --git a/src/create-fields.spec.js b/src/create-fields.spec.js index ac59fdd..c7a4fbd 100644 --- a/src/create-fields.spec.js +++ b/src/create-fields.spec.js @@ -2,18 +2,18 @@ /* eslint-disable no-unused-expressions */ import { expect } from 'chai'; -import Categorical from './fields/categorical'; -import Temporal from './fields/temporal'; -import Binned from './fields/binned'; -import Continuous from './fields/continuous'; +import Categorical from './fields/categorical'; +import Temporal from './fields/temporal'; +import Binned from 
'./fields/binned'; +import Continuous from './fields/continuous'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import { MeasureSubtype, DimensionSubtype } from './enums'; -import PartialField from './fields/partial-field' +import PartialField from './fields/partial-field'; describe('Creating Field', () => { describe('#createUnitFieldFromPartial', () => { it('should return an array of correct field instances', () => { - let mockedPartialField = new PartialField('Country',[],{ name: 'Country' }, null); + let mockedPartialField = new PartialField('Country', [], { name: 'Country' }, null); // mockedPartialField.schema = { name: 'Country' } let mockedRowDiffset = '1-2'; expect(createUnitFieldFromPartial(mockedPartialField, mockedRowDiffset) instanceof Categorical).to.be.true; diff --git a/src/datamodel.js b/src/datamodel.js index 5768f51..b7e3652 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -25,8 +25,8 @@ import reducerStore from './utils/reducer-store'; import { createFields } from './field-creator'; import InvalidAwareTypes from './invalid-aware-types'; import Value from './value'; -import { converterStore } from './converter' -import { fieldRegistry } from './fields' +import { converterStore } from './converter'; +import { fieldRegistry } from './fields'; /** * DataModel is an in-browser representation of tabular data. It supports @@ -99,14 +99,14 @@ class DataModel extends Relation { /** * Converters are functions that transforms data in various format tpo datamodel consumabe format. 
*/ - static get Converters(){ + static get Converters() { return converterStore; } /** * Register new type of fields */ - static get FieldTypes(){ + static get FieldTypes() { return fieldRegistry; } diff --git a/src/export.js b/src/export.js index b9ca731..422c3ab 100644 --- a/src/export.js +++ b/src/export.js @@ -17,7 +17,7 @@ import { } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; -import { DataConverter } from './converter' +import { DataConverter } from './converter'; import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; diff --git a/src/field-creator.js b/src/field-creator.js index d30765e..06f6fa6 100644 --- a/src/field-creator.js +++ b/src/field-creator.js @@ -1,17 +1,5 @@ import { FieldType, DimensionSubtype, MeasureSubtype } from './enums'; -import { - Categorical, - Temporal, - Binned, - Continuous, - CategoricalParser, - TemporalParser, - BinnedParser, - ContinuousParser, - PartialField -} from './fields'; - -import { fieldRegistry } from './fields' +import { fieldRegistry } from './fields'; /** * Creates a field instance according to the provided data and schema. @@ -20,62 +8,26 @@ import { fieldRegistry } from './fields' * @param {Object} schema - The field schema object. * @return {Field} Returns the newly created field instance. 
*/ -// function createUnitField(data, schema) { -// data = data || []; -// let partialField; - -// switch (schema.type) { -// case FieldType.MEASURE: -// switch (schema.subtype) { -// case MeasureSubtype.CONTINUOUS: -// partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); -// return new Continuous(partialField, `0-${data.length - 1}`); -// default: -// partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); -// return new Continuous(partialField, `0-${data.length - 1}`); -// } -// case FieldType.DIMENSION: -// switch (schema.subtype) { -// case DimensionSubtype.CATEGORICAL: -// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); -// return new Categorical(partialField, `0-${data.length - 1}`); -// case DimensionSubtype.TEMPORAL: -// partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema)); -// return new Temporal(partialField, `0-${data.length - 1}`); -// case DimensionSubtype.BINNED: -// partialField = new PartialField(schema.name, data, schema, new BinnedParser()); -// return new Binned(partialField, `0-${data.length - 1}`); -// default: -// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); -// return new Categorical(partialField, `0-${data.length - 1}`); -// } -// default: -// partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); -// return new Categorical(partialField, `0-${data.length - 1}`); -// } -// } - function createUnitField(data, schema) { data = data || []; - if(fieldRegistry.has(schema.subtype)){ + if (fieldRegistry.has(schema.subtype)) { return fieldRegistry.get(schema.subtype) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - } else { - return fieldRegistry.get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) - .BUILDER - .fieldName(schema.name) - .schema(schema) - .data(data) - .rowDiffset(`0-${data.length - 1}`) - .build() - + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build(); } + return fieldRegistry + .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build(); } @@ -89,20 +41,19 @@ function createUnitField(data, schema) { export function createUnitFieldFromPartial(partialField, rowDiffset) { const { schema } = partialField; - if(fieldRegistry.has(schema.subtype)){ + if (fieldRegistry.has(schema.subtype)) { return fieldRegistry.get(schema.subtype) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - } else { - return fieldRegistry.get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) - .BUILDER - .partialField(partialField) - .rowDiffset(rowDiffset) - .build() - + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build(); } + return fieldRegistry + .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build(); } /** diff --git a/src/fields/binned/index.js b/src/fields/binned/index.js index 5f34d37..8438416 100644 --- a/src/fields/binned/index.js +++ b/src/fields/binned/index.js @@ -1,5 +1,5 @@ import Dimension from '../dimension'; -import BinnedParser from '../parsers/binned-parser' +import BinnedParser from '../parsers/binned-parser'; /** * Represents binned field subtype. 
@@ -31,7 +31,7 @@ export default class Binned extends Dimension { return this.partialField.schema.bins; } - static parser(){ + static parser() { return new BinnedParser(); } } diff --git a/src/fields/categorical/index.js b/src/fields/categorical/index.js index 48c0ea6..e62f50d 100644 --- a/src/fields/categorical/index.js +++ b/src/fields/categorical/index.js @@ -1,7 +1,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { DimensionSubtype } from '../../enums'; import Dimension from '../dimension'; -import CategoricalParser from '../parsers/categorical-parser' +import CategoricalParser from '../parsers/categorical-parser'; /** * Represents categorical field subtype. * @@ -43,7 +43,7 @@ export default class Categorical extends Dimension { return domain; } - static parser(){ + static parser() { return new CategoricalParser(); } } diff --git a/src/fields/continuous/index.js b/src/fields/continuous/index.js index 3067a5e..a9b6688 100644 --- a/src/fields/continuous/index.js +++ b/src/fields/continuous/index.js @@ -2,7 +2,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { MeasureSubtype } from '../../enums'; import Measure from '../measure'; import InvalidAwareTypes from '../../invalid-aware-types'; -import ContinuousParser from '../parsers/continuous-parser' +import ContinuousParser from '../parsers/continuous-parser'; /** * Represents continuous field subtype. 
@@ -52,7 +52,7 @@ export default class Continuous extends Measure { return [min, max]; } - static parser(){ - return new ContinuousParser(); + static parser() { + return new ContinuousParser(); } } diff --git a/src/fields/field-registry.js b/src/fields/field-registry.js index 55a799f..9b228f0 100644 --- a/src/fields/field-registry.js +++ b/src/fields/field-registry.js @@ -1,36 +1,36 @@ -import Categorical from './categorical'; -import Temporal from './temporal'; -import Binned from './binned'; -import Continuous from './continuous'; -import { DimensionSubtype ,MeasureSubtype} from '../enums' +import Categorical from './categorical'; +import Temporal from './temporal'; +import Binned from './binned'; +import Continuous from './continuous'; +import { DimensionSubtype, MeasureSubtype } from '../enums'; -class FieldTypeRegistry{ - constructor(){ - this._fieldType = new Map(); +class FieldTypeRegistry { + constructor() { + this._fieldType = new Map(); } - registerFieldType(subtype,dimension){ - this._fieldType.set(subtype,dimension); + registerFieldType(subtype, dimension) { + this._fieldType.set(subtype, dimension); return this; } - has(type){ + has(type) { return this._fieldType.has(type); } - get(type){ + get(type) { return this._fieldType.get(type); } } -const registerDefaultFields = (store) => { +const registerDefaultFields = (store) => { store - .registerFieldType(DimensionSubtype.CATEGORICAL,Categorical) - .registerFieldType(DimensionSubtype.TEMPORAL,Temporal) - .registerFieldType(DimensionSubtype.BINNED,Binned) - .registerFieldType(MeasureSubtype.CONTINUOUS,Continuous) -} + .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical) + .registerFieldType(DimensionSubtype.TEMPORAL, Temporal) + .registerFieldType(DimensionSubtype.BINNED, Binned) + .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous); +}; const fieldRegistry = (function () { let store = null; @@ -47,5 +47,3 @@ const fieldRegistry = (function () { export default fieldRegistry; - - diff --git 
a/src/fields/field-registry.spec.js b/src/fields/field-registry.spec.js new file mode 100644 index 0000000..e69de29 diff --git a/src/fields/field/index.js b/src/fields/field/index.js index 24b64dd..beb7294 100644 --- a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -1,6 +1,5 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; -import PartialField from '../partial-field' -import FieldParser from '../parsers/field-parser'; +import PartialField from '../partial-field'; /** * In {@link DataModel}, every tabular data consists of column, a column is stored as field. @@ -34,8 +33,8 @@ export default class Field { this.rowDiffset = rowDiffset; } - static parser(){ - throw new Error("Not yet implemented") + static parser() { + throw new Error('Not yet implemented'); } /** @@ -132,43 +131,46 @@ export default class Field { throw new Error('Not yet implemented'); } - static get BUILDER(){ + static get BUILDER() { const builder = { - _params : {}, - _context : this, - fieldName : function(name) { + _params: {}, + _context: this, + fieldName(name) { this._params.name = name; return this; }, - schema : function(schema){ + schema(schema) { this._params.schema = schema; return this; }, - data : function(data){ + data(data) { this._params.data = data; return this; }, - partialField : function(partialField){ - this._params.partialField = partialField + partialField(partialField) { + this._params.partialField = partialField; return this; }, - rowDiffset : function(rowDiffset){ - this._params.rowDiffset = rowDiffset + rowDiffset(rowDiffset) { + this._params.rowDiffset = rowDiffset; return this; }, - build : function(){ + build() { let partialField = null; - if(this._params.partialField instanceof PartialField){ - partialField = this._params.partialField - }else if(this._params.schema && this._params.data ){ - partialField = new PartialField(this._params.name, this._params.data, this._params.schema, this._context.parser()) + if 
(this._params.partialField instanceof PartialField) { + partialField = this._params.partialField; + } else if (this._params.schema && this._params.data) { + partialField = new PartialField(this._params.name, + this._params.data, + this._params.schema, + this._context.parser()); } else { - throw new Error("Invalid Field parameters") + throw new Error('Invalid Field parameters'); } - return new this._context(partialField,this._params.rowDiffset); + return new this._context(partialField, this._params.rowDiffset); } - } + }; return builder; } } diff --git a/src/fields/parsers/temporal-parser/index.js b/src/fields/parsers/temporal-parser/index.js index a5622a8..77ef92c 100644 --- a/src/fields/parsers/temporal-parser/index.js +++ b/src/fields/parsers/temporal-parser/index.js @@ -29,10 +29,10 @@ export default class TemporalParser extends FieldParser { * @param {string|number} val - The value of the field. * @return {number} Returns the millisecond value. */ - parse (val, { format } ) { + parse (val, { format }) { let result; // check if invalid date value - if(!this._dtf){ + if (!this._dtf) { this._dtf = new DateTimeFormatter(format); } if (!InvalidAwareTypes.isInvalid(val)) { diff --git a/src/fields/parsers/temporal-parser/index.spec.js b/src/fields/parsers/temporal-parser/index.spec.js index 73d4aa5..728eeaf 100644 --- a/src/fields/parsers/temporal-parser/index.spec.js +++ b/src/fields/parsers/temporal-parser/index.spec.js @@ -24,30 +24,30 @@ describe('TemporalParser', () => { it('should return milliseconds for the formatted value', () => { const dateStr = '2017-03-01'; const expectedTs = new DateTimeFormatter(schema.format).getNativeDate(dateStr).getTime(); - expect(temParser.parse(dateStr,{ format:schema.format })).to.equal(expectedTs); + expect(temParser.parse(dateStr, { format: schema.format })).to.equal(expectedTs); }); it('should bypass to Date API when format is not present', () => { const val = 1540629018697; temParser = new TemporalParser(); - 
expect(temParser.parse(val,{ format: undefined })).to.equal(+new Date(val)); + expect(temParser.parse(val, { format: undefined })).to.equal(+new Date(val)); }); it('should return default invalid type for invalid value', () => { - expect(temParser.parse(null,{ format:schema.format })).to.eql(DataModel.InvalidAwareTypes.NULL); - expect(temParser.parse(undefined,{ format:schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); - expect(temParser.parse('abcd',{ format:schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse(null, { format: schema.format })).to.eql(DataModel.InvalidAwareTypes.NULL); + expect(temParser.parse(undefined, { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd', { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); }); it('should return valid date for edge case', () => { - expect(temParser.parse('',{ format:schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('', { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); temParser = new TemporalParser(); - expect(temParser.parse('1998',{ format: '%Y' })).to.equal(new Date(1998, 0, 1).getTime()); + expect(temParser.parse('1998', { format: '%Y' })).to.equal(new Date(1998, 0, 1).getTime()); temParser = new TemporalParser(); - expect(temParser.parse('98',{ format: '%y' })).to.equal(new Date(1998, 0, 1).getTime()); + expect(temParser.parse('98', { format: '%y' })).to.equal(new Date(1998, 0, 1).getTime()); - expect(temParser.parse('abcd',{ format: '%y' })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd', { format: '%y' })).to.equal(DataModel.InvalidAwareTypes.NA); }); }); }); diff --git a/src/fields/partial-field/index.js b/src/fields/partial-field/index.js index 299ba7e..a31457c 100644 --- a/src/fields/partial-field/index.js +++ b/src/fields/partial-field/index.js @@ -31,6 +31,6 @@ export default class PartialField { * @return {Array} 
Returns the sanitized data. */ _sanitize (data) { - return data.map(datum => this.parser.parse(datum, { format : this.schema.format })); + return data.map(datum => this.parser.parse(datum, { format: this.schema.format })); } } diff --git a/src/fields/partial-field/index.spec.js b/src/fields/partial-field/index.spec.js index 290e08c..49edf06 100644 --- a/src/fields/partial-field/index.spec.js +++ b/src/fields/partial-field/index.spec.js @@ -31,7 +31,7 @@ describe('PartialField', () => { }); it('should sanitize the input data before use', () => { - const expected = data.map(d => temParser.parse(d,{ format : schema.format })); + const expected = data.map(d => temParser.parse(d, { format: schema.format })); expect(partField.data).to.eql(expected); }); }); diff --git a/src/fields/temporal/index.js b/src/fields/temporal/index.js index eeac4af..0df6bbf 100644 --- a/src/fields/temporal/index.js +++ b/src/fields/temporal/index.js @@ -2,7 +2,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import Dimension from '../dimension'; import { DateTimeFormatter } from '../../utils'; import InvalidAwareTypes from '../../invalid-aware-types'; -import TemporalParser from '../parsers/temporal-parser' +import TemporalParser from '../parsers/temporal-parser'; /** * Represents temporal field subtype. 
@@ -123,8 +123,8 @@ export default class Temporal extends Dimension { return data; } - static parser(){ + static parser() { return new TemporalParser(); - } + } } diff --git a/src/helper.js b/src/helper.js index 9acce88..8562f16 100644 --- a/src/helper.js +++ b/src/helper.js @@ -7,7 +7,7 @@ import { import { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import defaultConfig from './default-config'; -import { converterStore } from './converter'; +import { converterStore } from './converter'; import { extend2, detectDataFormat } from './utils'; /** @@ -421,7 +421,7 @@ export const updateData = (relation, data, schema, options) => { schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); const converter = converterStore.get(options.dataFormat); - + if (!converter) { throw new Error(`No converter function found for ${options.dataFormat} format`); From bbc1d897bf81428bfb6053ef3076434a70724e64 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Thu, 10 Oct 2019 17:27:34 +0530 Subject: [PATCH 12/20] Code Coverage --- src/converter/dataConverterStore.js | 15 ++--- src/converter/dataConverterStore.spec.js | 31 ++++++++++ .../defaultConverters/jsonConverter.spec.js | 56 +++++++++++++++++++ src/converter/model/dataConverter.spec.js | 11 ++++ src/fields/field-registry.js | 9 +-- 5 files changed, 107 insertions(+), 15 deletions(-) create mode 100644 src/converter/dataConverterStore.spec.js create mode 100644 src/converter/defaultConverters/jsonConverter.spec.js create mode 100644 src/converter/model/dataConverter.spec.js diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js index 81cf09e..6e3620e 100644 --- a/src/converter/dataConverterStore.js +++ b/src/converter/dataConverterStore.js @@ -24,10 +24,8 @@ class DataConverterStore { * @param {Array} converters : contains array of converter instance * 
@return { Map } */ - converters(converters) { - if (converters.length) { - converters.forEach(converter => this.store.set(converter.type, converter)); - } + converters(converters = []) { + converters.forEach(converter => this.store.set(converter.type, converter)); return this.store; } @@ -39,8 +37,9 @@ class DataConverterStore { register(converter) { if (converter instanceof DataConverter) { this.store.set(converter.type, converter); + return this; } - return this; + return null; } /** @@ -67,12 +66,10 @@ const converterStore = (function () { let store = null; function getStore () { - if (store === null) { - store = new DataConverterStore(); - } + store = new DataConverterStore(); return store; } - return getStore(); + return store || getStore(); }()); export default converterStore; diff --git a/src/converter/dataConverterStore.spec.js b/src/converter/dataConverterStore.spec.js new file mode 100644 index 0000000..89966fa --- /dev/null +++ b/src/converter/dataConverterStore.spec.js @@ -0,0 +1,31 @@ +/* global describe, it */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import converterStore from './dataConverterStore'; +import DataConverter from '../converter/model/dataConverter'; + +describe('#DataConverterStore', () => { + it('should register and unregister converter', () => { + class JSONConverter2 extends DataConverter { + constructor() { + super('json2'); + } + + convert() { + return ''; + } + } + + const converter = new JSONConverter2(); + converterStore.register(converter); + expect(converterStore.get('json2')).to.not.null; + + converterStore.unregister(converter); + expect(converterStore.get('json2')).to.null; + }); + + it('should not register invalid Coverter', () => { + expect(converterStore.register(() => {})).to.null; + }); +}); diff --git a/src/converter/defaultConverters/jsonConverter.spec.js b/src/converter/defaultConverters/jsonConverter.spec.js new file mode 100644 index 0000000..c398456 --- /dev/null +++ 
b/src/converter/defaultConverters/jsonConverter.spec.js @@ -0,0 +1,56 @@ +/* global describe, it ,beforeEach */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import JSONConverter from './jsonConverter'; + +describe('JSON Converter', () => { + let data; + let converter = new JSONConverter(); + beforeEach(() => { + data = [ + { + a: 1, + b: 2, + c: 3 + }, + { + a: 4, + b: 5, + c: 6 + }, + { + a: 7, + b: 8, + c: 9 + } + ]; + }); + + describe('#JSON', () => { + it('should convert to JSON data', () => { + const schema = [ + { + name: 'a', + type: 'measure', + subtype: 'continuous' + }, + { + name: 'b', + type: 'measure', + subtype: 'continuous' + }, + { + name: 'c', + type: 'measure', + subtype: 'continuous' + } + ]; + + const parsedData = converter.convert(data, schema); + const expected = [['a', 'b', 'c'], [[1, 4, 7], [2, 5, 8], [3, 6, 9]]]; + + expect(parsedData).to.deep.equal(expected); + }); + }); +}); diff --git a/src/converter/model/dataConverter.spec.js b/src/converter/model/dataConverter.spec.js new file mode 100644 index 0000000..cabd996 --- /dev/null +++ b/src/converter/model/dataConverter.spec.js @@ -0,0 +1,11 @@ +/* global describe, it */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import DataConverter from './dataConverter'; + +describe('#DataConverterModel', () => { + it('should throw error', () => { + expect(new DataConverter().convert).to.throw(Error, 'Convert method not implemented'); + }); +}); diff --git a/src/fields/field-registry.js b/src/fields/field-registry.js index 9b228f0..984f492 100644 --- a/src/fields/field-registry.js +++ b/src/fields/field-registry.js @@ -34,15 +34,12 @@ const registerDefaultFields = (store) => { const fieldRegistry = (function () { let store = null; - function getStore () { - if (store === null) { - store = new FieldTypeRegistry(); - registerDefaultFields(store); - } + store = new FieldTypeRegistry(); + registerDefaultFields(store); return store; } - 
return getStore(); + return store || getStore(); }()); export default fieldRegistry; From d6f2c66feda21d6da79192fafc9c0196f56df617 Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Sun, 13 Oct 2019 10:25:17 +0530 Subject: [PATCH 13/20] Exposed Utilities --- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- package.json | 2 +- src/export.js | 6 ++++-- src/fields/index.js | 1 + 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/dist/datamodel.js b/dist/datamodel.js index a53fee1..bdf7d9a 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var i=t[r]={i:r,l:!1,exports:{}};return e[r].call(i.exports,i,i.exports,n),i.l=!0,i.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)n.d(r,i,function(t){return e[t]}.bind(null,i));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports=JSON.parse('{"name":"datamodel","description":"Relational algebra compliant in-memory tabular data 
store","homepage":"https://github.com/chartshq/datamodel","version":"2.2.1","license":"MIT","main":"dist/datamodel.js","keywords":["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],"author":"Muzejs.org (https://muzejs.org/)","repository":{"type":"git","url":"https://github.com/chartshq/datamodel.git"},"contributors":[{"name":"Akash Goswami","email":"akashgoswami90s@gmail.com"},{"name":"Subhash Haldar"},{"name":"Rousan Ali","email":"rousanali786@gmail.com","url":"https://rousan.io"},{"name":"Ujjal Kumar Dutta","email":"duttaujjalkumar@live.com"}],"dependencies":{"d3-dsv":"^1.0.8"},"devDependencies":{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0","chai":"3.5.0","cross-env":"^5.0.5","eslint":"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0","jsdoc":"3.5.5","json2yaml":"^1.1.0","karma":"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3","marked":"^0.5.0","mocha":"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0","webpack":"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},"scripts":{"test":"npm run lint && npm run ut","ut":"karma start karma.conf.js","utd":"karma start --single-run false --browsers Chrome karma.conf.js ","build":"npm run build:prod","build:dev":"webpack --mode development","build:prod":"webpack --mode production","start":"webpack-dev-server --config webpack.config.dev.js --mode development --open","lint":"eslint ./src","lint-errors":"eslint --quiet ./src","docs":"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}')},function(e,t,n){var 
r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",(function(){return o})),n.d(r,"DimensionSubtype",(function(){return u})),n.d(r,"MeasureSubtype",(function(){return c})),n.d(r,"FieldType",(function(){return f})),n.d(r,"FilteringMode",(function(){return l})),n.d(r,"GROUP_BY_FUNCTIONS",(function(){return s}));var i={};n.r(i),n.d(i,"Dimension",(function(){return Me})),n.d(i,"Measure",(function(){return Ue})),n.d(i,"FieldParser",(function(){return Re})),n.d(i,"fieldRegistry",(function(){return it}));var a={};n.r(a),n.d(a,"sum",(function(){return jn})),n.d(a,"avg",(function(){return An})),n.d(a,"min",(function(){return kn})),n.d(a,"max",(function(){return Dn})),n.d(a,"first",(function(){return Sn})),n.d(a,"last",(function(){return Tn})),n.d(a,"count",(function(){return Fn})),n.d(a,"sd",(function(){return Nn}));var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function p(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,i=void 0;if(!n)return t;var a=n.toLowerCase();for(i=0,r=e.length;ii.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return 
t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var 
y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var _={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return 
t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof 
t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return G(t,e,n,!1,L.RIGHTOUTER)}var Te=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Ue),tt=function(){function e(e,t){for(var 
n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Gt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Jt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Gt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return 
i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Jt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Jt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case 
f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,i=e.subtype,a=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(i))throw new Error("DataModel doesn't support dimension field subtype "+i+" used for "+a+" field");break;case f.MEASURE:if(-1===t.indexOf(i))throw new Error("DataModel doesn't support measure field subtype "+i+" used for "+a+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+a+" field")}}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var 
n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure 
non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Jt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Jt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new F(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return 
c.addField(v),Jt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return 
it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Tn=me.last,Fn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Jt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;ni.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 
0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var _={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var 
o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return 
null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function 
s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return G(t,e,n,!1,L.RIGHTOUTER)}var Te=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Me),tt=function(){function e(e,t){for(var n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o0&&void 0!==arguments[0]?arguments[0]:[];return t.forEach((function(t){return e.store.set(t.type,t)})),this.store}},{key:"register",value:function(e){return e instanceof ct?(this.store.set(e.type,e),this):null}},{key:"unregister",value:function(e){return this.store.delete(e.type),this}},{key:"get",value:function(e){return this.store.has(e)?this.store.get(e):null}}]),e}(),Mt=function(){var e=null;return e||(e=new It)}(),xt=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function Ut(e,t,n){return t in 
e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Lt(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Gt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Jt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Gt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 
0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Jt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Jt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,i=e.subtype,a=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(i))throw new Error("DataModel doesn't support dimension field subtype "+i+" used for 
"+a+" field");break;case f.MEASURE:if(-1===t.indexOf(i))throw new Error("DataModel doesn't support measure field subtype "+i+" used for "+a+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+a+" field")}}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var 
d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 
0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Jt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Jt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new F(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return c.addField(v),Jt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return 
n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Tn=me.last,Fn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return 
t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Jt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format : this.schema.format }));\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse 
the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field'\nimport FieldParser from '../parsers/field-parser';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser(){\n throw new Error(\"Not yet implemented\")\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns 
the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER(){\n const builder = {\n _params : {},\n _context : this,\n fieldName : function(name) {\n this._params.name = name;\n return this;\n },\n schema : function(schema){\n this._params.schema = schema;\n return this;\n },\n data : function(data){\n this._params.data = data;\n return this;\n },\n partialField : function(partialField){\n this._params.partialField = partialField\n return this;\n },\n rowDiffset : function(rowDiffset){\n this._params.rowDiffset = rowDiffset\n return this;\n },\n build : function(){\n let partialField = null;\n if(this._params.partialField instanceof PartialField){\n 
partialField = this._params.partialField\n }else if(this._params.schema && this._params.data ){\n partialField = new PartialField(this._params.name, this._params.data, this._params.schema, this._context.parser())\n }\n else {\n throw new Error(\"Invalid Field parameters\")\n }\n return new this._context(partialField,this._params.rowDiffset);\n }\n }\n return builder;\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n 
*\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser'\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * 
@public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser(){\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format } ) {\n let result;\n // check if invalid date value\n if(!this._dtf){\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser'\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser(){\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser'\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser(){\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser'\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser(){\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype ,MeasureSubtype} from '../enums'\n\n\nclass FieldTypeRegistry{\n constructor(){\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype,dimension){\n this._fieldType.set(subtype,dimension);\n return this;\n }\n\n has(type){\n return this._fieldType.has(type);\n }\n\n get(type){\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL,Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL,Temporal)\n .registerFieldType(DimensionSubtype.BINNED,Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS,Continuous)\n}\n\nconst fieldRegistry = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n }\n return store;\n }\n return getStore();\n}());\n\nexport default fieldRegistry;\n\n\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\nimport { fieldRegistry } from './fields'\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\n// function createUnitField(data, schema) {\n// data = data || [];\n// let partialField;\n\n// switch (schema.type) {\n// case FieldType.MEASURE:\n// switch (schema.subtype) {\n// case MeasureSubtype.CONTINUOUS:\n// partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n// return new Continuous(partialField, `0-${data.length - 1}`);\n// default:\n// partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n// return new Continuous(partialField, `0-${data.length - 1}`);\n// }\n// case FieldType.DIMENSION:\n// switch (schema.subtype) {\n// case DimensionSubtype.CATEGORICAL:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// case DimensionSubtype.TEMPORAL:\n// partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n// return new 
Temporal(partialField, `0-${data.length - 1}`);\n// case DimensionSubtype.BINNED:\n// partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n// return new Binned(partialField, `0-${data.length - 1}`);\n// default:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// }\n// default:\n// partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n// return new Categorical(partialField, `0-${data.length - 1}`);\n// }\n// }\n\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if(fieldRegistry.has(schema.subtype)){\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build()\n } else {\n return fieldRegistry.get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build()\n \n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if(fieldRegistry.has(schema.subtype)){\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build()\n } else {\n return fieldRegistry.get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build()\n \n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter{\n constructor(type){\n this._type = type;\n }\n\n get type(){\n return this._type;\n }\n\n convert(data,schema,options){\n throw new Error(\"Convert method not implemented.\")\n }\n\n}","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length 
< width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from \"../model/dataConverter\";\nimport DSVStr from \"../utils/dsv-str\";\nimport DataFormat from 
'../../enums/data-format'\n\nexport default class DSVStringConverter extends DataConverter{\n constructor(){\n super(DataFormat.DSV_STR)\n }\n\n convert(data , schema , options){\n return DSVStr(data,schema,options);\n }\n} ","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from \"../model/dataConverter\";\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format'\n\nexport default class JSONConverter extends DataConverter{\n constructor(){\n super(DataFormat.FLAT_JSON)\n }\n\n convert(data , schema , options){\n return FlatJSON(data,schema,options);\n }\n} ","import DataConverter from \"../model/dataConverter\";\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format'\n\nexport default class DSVArrayConverter extends DataConverter{\n 
constructor(){\n super(DataFormat.DSV_ARR);\n }\n\n convert(data , schema , options){\n return DSVArr(data,schema,options);\n }\n} ","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from \"../model/dataConverter\";\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format'\n\nexport default class AutoDataConverter extends DataConverter{\n constructor(){\n super(DataFormat.AUTO)\n }\n\n convert(data , schema , options){\n return AUTO(data,schema,options);\n }\n} ","import DataConverter from './model/dataConverter'\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter'\n\nclass DataConverterStore {\n constructor(){\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters(){\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ]\n }\n\n /**\n * \n * @param {Array} converters : contains array of converter instance\n * @return { Map } \n */\n converters(converters){\n if(converters.length){\n 
converters.forEach(converter => this.store.set(converter.type,converter));\n }\n return this.store;\n }\n\n /**\n * \n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter){\n if(converter instanceof DataConverter){\n this.store.set(converter.type,converter)\n }\n return this;\n }\n\n /**\n * \n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter){\n this.store.delete(converter.type)\n return this;\n }\n\n get(name){\n if(this.store.has(name)){\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new DataConverterStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default converterStore;","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n 
partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n \n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, 
options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === 
false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, 
union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * 
{ name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. 
It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter'\nimport { fieldRegistry } from './fields'\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters(){\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes(){\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter'\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n 
version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataMod
el/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/defaultConverters/autoCoverter.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/expor
t.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar
","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","len","store","fields","forEach","fieldIndex","Array","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","rawValue","formattedValue","getNumberFormattedVal","defineProperties","_value","configurable","writable","_formattedValue","_internalValue","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset",
"rowAdded","rowPosition","ii","tuple","userArg","partialField","formattedData","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","dataType","sortType","retFunc","getSortFn","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","applyStandardSort","sortingDetails","fieldName","sortMeta","fieldInSchema","sortingFn","slice","f","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","sortData","dataObj","filter","sDetial","groupSortingIdx","findIndex","standardSortingDetails","groupSortingDetails","detail","sortedGroupMap","row","nextMap","applyGroupSort","uids","pop","dataBuilder","colIdentifier","addUid","columnWise","retObj","reqSorting","tmpDataArr","colName","insertInd","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","filteredValues","min","max","sqrt","mean","num","variance","defaultReducerName","ReducerStore","defReducer","entries","reducer","__unregister","delete","Function","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","rowCount","hash","_","cachedStore","cloneProvider","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","internalValue","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","PartialField","_sanitize","Field","subtype","description","displayName","_params","_c
ontext","build","Dimension","_cachedDomain","calculateDataDomain","Measure","unit","numberFormat","FieldParser","CategoricalParser","isInvalid","getInvalidType","trim","Categorical","Set","domain","add","TemporalParser","_dtf","Temporal","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","dataFormat","parsedDatum","BinnedParser","matched","parseFloat","Binned","binsArr","bins","ContinuousParser","Continuous","NEGATIVE_INFINITY","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","fieldRegistry","createFields","dataColumn","headers","headersObj","header","BUILDER","createUnitField","DataConverter","_type","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","concat","formatBody","formatRows","csv","dsv","tsv","DSVArr","schemaFields","unitSchema","firstRowHeader","columnMajor","headerMap","h","schemaField","headIndex","DSVStr","fieldSeparator","d3Dsv","DSVStringConverter","FlatJSON","insertionIndex","schemaFieldsName","JSONConverter","DSVArrayConverter","Auto","converters","AutoDataConverter","DataConverterStore","_getDefaultConverters","converter","converterStore","prepareSelectionData","rawData","resp","updateFields","partialFieldspace","fieldStoreName","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistCurrentDerivation","model","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","persistDerivations","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastIn
sertedValue","li","selectRowDiffsetIterator","checker","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","getPartialFieldspace","formattedFieldsData","rawFieldsData","cloneWithAllFields","clone","calculateFieldsConfig","getKey","fn","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","getFieldsConfig","propModel","keyFn","getData","fieldsConfig","dLen","indices","fieldsSpace","v","valuesMap","present","every","select","saveChild","some","addDiffsetToClonedDm","selectConfig","cloneWithProject","projField","allFields","cloned","projectionSet","actualProjField","splitWithProject","projFieldSet","projFields","sanitizeUnitSchema","sanitizeAndValidateSchema","supportedMeasureSubTypes","supportedDimSubTypes","GEO","validateUnitSchema","updateData","relation","defaultConfig","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","_partialFieldspace","valueObjects","_cachedValueObjects","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","derivation","params","groupByString","getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","getRootGroupByModel","_parent","find","getRootDataModel","getPathToRootModel","path","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","values","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","p
ropagateImmutableActions","immutableActions","filterImmutableAction","criteriaModel","addToPropNamespace","sourceNamespace","isMutableAction","getNormalizedProFields","fieldConfig","normalizedProjField","constructor","search","Relation","source","_fieldStoreName","_propagationNameSpace","_fieldspace","joinWith","unionWith","differenceWith","defConfig","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithSelect","setParent","_fieldConfig","fieldObj","def","removeChild","sibling","parent","_onPropagation","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","ids","fill","fieldsArr","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","cachedValueObjects","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","addField","addToNameSpace","payload","eventName","measureFieldName","binFieldName","measureField","binsCount","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","clonedDMs","splitWithSelect","uniqueFields","commonFields","normalizedProjFieldSets","fieldSet","first","last","count","sd","std","Operators","compose","operations","currentDM","firstChild","dispose","bin","project","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","FieldsUtility","enums"],"mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,YAAa,GAAIH,GACE,iBAAZC,QACdA,QAAmB,UAAID,IAEvBD,EAAgB,UAAIC,IARtB,CASGK,QAAQ,WACX,O,YCTE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUP,QAGnC,IAAIC,EAASI,EAAiBE,GAAY,CACzCC,EAAGD,EACHE,GAAG,EACHT,QAAS,IAUV,OANAU,EAAQH,GAAUI,KAAKV,EAAOD,QAASC,EAAQA,EAAOD,QAASM,GAG/DL,EAAOQ,GAAI,EAGJR,EAAOD,QA0Df,OArDAM,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASd,EAASe,EAAMC,GAC3CV,EAAoBW,EAAEjB,EAASe,IAClCG,OAAOC,eAAenB,EAASe,EAAM,CAAEK,YAAY,EAA
MC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAAStB,GACX,oBAAXuB,QAA0BA,OAAOC,aAC1CN,OAAOC,eAAenB,EAASuB,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAenB,EAAS,aAAc,CAAEyB,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAAShC,GAChC,IAAIe,EAASf,GAAUA,EAAO2B,WAC7B,WAAwB,OAAO3B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAK,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,G,+jEClFrD,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,G,k3BCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCCKC,EANU,CACrBC,YAAa,cACbC,SAAU,WACVC,OAAQ,UCCGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MA
CA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,EAAG,CAECM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,EAAG,CAECvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECtG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEClH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,EAAG,CAECC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KA
AK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,EAAG,CAECG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,EAAG,CAECjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI,UAAK8F,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAX,YAAWA,IACX,kBAASA,OAAT,YAASA,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,KAAM,CAACyD,E
AAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU8C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIJ,SACAK,SACAC,SACAzE,SAcJ,OAZIqE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGvD,OAAOuD,EAAO,MACxBC,GAAO,GAGXN,EAAcE,GAEdF,EADOE,GAGOD,EAGbD,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCM,IACAzE,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkB+F,WAAa,SAAU9F,GAQrC,IAPA,IAAM+F,EAAchG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCmE,EAAgBzJ,OAAO0J,KAAK1B,GAC5B2B,EAAa,GACfrK,SACAsK,UAEItK,EAAImE,EAAOoG,QAAQL,EAAalK,EAAI,KAAO,GAC/CsK,EAAcnG,EAAOnE,EAAI,IACmB,IAAxCmK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZlE,MAAOtG,EACPyK,MAAOH,IAIf,OAAOD,GASXnG,EAAkBwG,SAAW,SAAU3G,EAAMI,GACzC,IAQIlE,EARE0K,EAAQ7G,EAAoBC,GAC5BsG,EAAanG,EAAkB+F,WAAW9F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC4E,EAAeC,OAAO1G,GACpB+F,EAAchG,EAAkBU,aAClC6F,SACAK,SACA9K,SAGJ,IAAKA,EAAI,EAAGC,EAAIoK,EAAWtE,OAAQ/F,EAAIC,EAAGD,IAEtC8K,EAAepC,EADf+B,EAAQJ,EAAWrK,GAAGyK,OACYhE,UAAUkE,GAC5CC,EAAeA,EAAajG,QAAQ,IAAIH,OAAO0F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX1G,EAAkBtC,UAAUmJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM1B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK8G,kBAAkBF,GAClCG,EAAajH,EAAkBW,wBAC/BuG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAlG,SACAvF,SACA0L,SACAC,SACA1L,SACA+H,EAAS,GAEb,IAAKuD,KAAehC,EAChB,GAAK,GAAG1H,eAAe1B,KAAKoJ,EAAegC,GAA3C,CAMA,IAJAD,EAAKvF,OAAS,EAEd0F,GADAD,EAAiBjC,EAAcgC,IACHK,OAAOJ,EAAezF,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIuL,EAAezF,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNqH,EAAQF,EAAexL,IACFO,OAGjB+K,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOnG,IAM1B,GAAI,OAFJoG,EAAcF,EAAWI,MAAMzH,KAAMkH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWtF,QAAU3B,KAAK0H,gBAAgBT,EAAWtF,QAErDiC,EAAO+D,QAAQV,EAAW,GAAI,EAAG,GAEjCrD,EAAO+D,QAAP,MAAA/D,EAAkBqD,GAGfrD,GAQX9D,EAAkBtC,UAAUsJ,kBAAoB,SAAUF,GACtD,IAYI/K,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCkE,EAAchG,EAAkBU,aAChCyF,EAAanG,EAAkB+F,WAAW9F,GAC1C6H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGArM,SAEJqM,EAAcxB,OAAO1G,GAErB,IAAMmI,EAAWjC,EAAWkC,KA
AI,SAAAC,GAAA,OAAOA,EAAI/B,SACrCgC,EAAmBpC,EAAWtE,OACpC,IAAK/F,EAAIyM,EAAmB,EAAGzM,GAAK,EAAGA,KACnCmM,EAAW9B,EAAWrK,GAAGsG,OAEV,IAAM+F,EAAYtG,OAAS,QAKdzB,IAAxB2H,IACAA,EAAsBI,EAAYtG,QAGtCqG,EAAaC,EAAYpE,UAAUkE,EAAW,EAAGF,GACjDI,EAAcA,EAAYpE,UAAU,EAAGkE,EAAW,GAC9C3H,OAAOC,OAAO2H,GACdC,EAAYpE,UAAUgE,EAAqBI,EAAYtG,QAE3DkG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKnM,EAAI,EAAGA,EAAIyM,EAAkBzM,IAC9BkM,EAAS7B,EAAWrK,GACpBqM,EAAcA,EAAY1H,QAAQuF,EAAcgC,EAAOzB,MAAO/B,EAAYwD,EAAOzB,OAAOlE,WAG5F,IAAMmG,EAAgB1B,EAAc2B,MAAM,IAAInI,OAAO6H,KAAiB,GAGtE,IAFAK,EAAcE,QAET5M,EAAI,EAAGC,EAAIqM,EAASvG,OAAQ/F,EAAIC,EAAGD,IACpCgM,EAASM,EAAStM,IAAM0M,EAAc1M,GAE1C,OAAOgM,GAQX9H,EAAkBtC,UAAUiL,cAAgB,SAAU7B,GAClD,IAAIjH,EAAO,KACX,GAAI+I,OAAOrH,SAASuF,GAChBjH,EAAO,IAAIC,KAAKgH,QACb,IAAK5G,KAAKD,QAAUH,KAAK+G,MAAMC,GAClCjH,EAAO,IAAIC,KAAKgH,OAEf,CACD,IAAM3G,EAAWD,KAAKC,SAAWD,KAAK2G,MAAMC,GACxC3G,EAAS0B,SACT3B,KAAKG,WAAL,kCAAsBP,KAAtB,c,sHAAA,CAA8BK,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUkK,gBAAkB,SAASiB,GACnD,OAAe,IAARA,GAAa3I,KAAKD,OAAOwI,MAAM,QAAQ5G,QASlD7B,EAAkBtC,UAAU8I,SAAW,SAAUvG,EAAQ6G,GACrD,IAAIzG,SAQJ,OANIyG,EACAzG,EAAaH,KAAKG,WAAaH,KAAKyI,cAAc7B,IACzCzG,EAAaH,KAAKG,cAC3BA,EAAaH,KAAKyI,cAAc7B,IAG7B9G,EAAkBwG,SAASnG,EAAYJ,ICruBnC,eAAC6I,GACZ,IAAIhN,EAAI,EACR,OAAO,WAAe,2BAAXiN,EAAW,qBAAXA,EAAW,gBAClBA,EAAOC,SAAQ,SAAC3H,EAAK4H,GACXH,EAAMG,aAAuBC,QAC/BJ,EAAMG,GAAcC,MAAMC,KAAK,CAAEtH,OAAQ/F,KAE7CgN,EAAMG,GAAY3C,KAAKjF,MAE3BvF,M,4MCdFsN,EAAe,SACfC,EAAgB7M,OAAOkB,UAAU+E,SACjC6G,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAelB,EAAKmB,GAIzB,IAHA,IAAI3N,EAAI2N,EAAU5H,OACd6H,GAAU,EAEP5N,GAAG,CACN,GAAIwM,IAAQmB,EAAU3N,GAElB,OADA4N,EAAS5N,EAGbA,GAAK,EAGT,OAAO4N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,cAAOA,MAASR,SAAgB,IAAOS,EAAP,cAAOA,MAAST,EACzC,WAGP,IAAOS,EAAP,cAAOA,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,cAAOA,MAASR,IAChBQ,EAAOC,aAAgBX,MAAQ,GAAK,IAnH5C,SAASa,EAAMH,EAAMC,EAAMC,EAAWE,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAO1D,KAAKsD,GACZK,EAAO3D,KAAKuD,KALZG,EAAS,CAACJ,GACVK,
EAAS,CAACJ,IAOVA,aAAgBX,MAChB,IAAKgB,EAAO,EAAGA,EAAOL,EAAKhI,OAAQqI,GAAQ,EAAG,CAC1C,IACIC,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,eAGA,IAAO6G,EAAP,cAAOA,MAAWhB,EACZU,QAAwB1J,IAAXgK,IACfR,EAAKM,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQE,aAAkBlB,MAAQ,GAAK,KAG3C,KADdoB,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,SAMrD,IAAKC,KAAQL,EAAM,CACf,IACIM,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,SAGJ,GAAe,OAAX6G,SAAmB,IAAOA,EAAP,cAAOA,MAAWhB,GAKrCiB,EAAMhB,EAAcpN,KAAKmO,MACbd,GACO,OAAXa,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAGxCI,IAAQd,GACE,OAAXY,GAAqBA,aAAkBjB,QACvCiB,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAI7CL,EAAKM,GAAQE,MAGhB,CACD,GAAIN,QAAwB1J,IAAXgK,EACb,SAEJR,EAAKM,GAAQE,GAIzB,OAAOR,EAiBPG,CAAMH,EAAMC,EAAMC,GACXF,GCnIJ,SAASW,EAASlJ,GACrB,OAAO6H,MAAMqB,QAAQlJ,GA2ClB,IAAMmJ,EAAc,wBAAY,IAAI1K,MAAO2K,UAAYvG,KAAKwG,MAAsB,IAAhBxG,KAAKyG,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKhJ,SAAWiJ,EAAKjJ,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAI+O,EAAKhJ,OAAQ/F,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAa1J,GACzB,OAAOA,EASJ,IAAM2J,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAKpJ,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4B6J,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CAtK,KAAK+K,KAAKK,GAAU,CAChBjP,KAAMiP,EACNvC,OAAQsC,EAERE,UAJgB,WAKZ,IAAIA,EAAYrL,KAAKsL,iBAQrB,OANKD,IACDA,EAAYrL,KAAKsL,iBAAmB,GACpCtL,KAAK6I,OAAOC,SAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,MAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBzL,KAAK0L,eAUzB,OARKD,IACDA,EAAgBzL,KAAK0L,eAAiB,GACtC1L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAU
C,UAClC8M,EAAcF,EAAMpP,QAAUoP,OAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkB9L,KAAK+L,iBAU3B,OARK/L,KAAK+L,mBACND,EAAkB9L,KAAK+L,iBAAmB,GAC1C/L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUE,YAClCkN,EAAgBP,EAAMpP,QAAUoP,OAIrCO,IAGR9L,KAAK+K,KAAKK,K,yPCqCVY,E,WAxEX,WAAanP,EAAOoP,EAAUV,I,4FAAO,SACjC,IAAMW,EAAiBC,GAAsBZ,EAAO1O,GAEpDP,OAAO8P,iBAAiBpM,KAAM,CAC1BqM,OAAQ,CACJ7P,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,SAEJ2P,gBAAiB,CACbhQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOqP,GAEXO,eAAgB,CACZjQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOoP,KAIfjM,KAAKuL,MAAQA,E,6CAkCb,OAAO9E,OAAOzG,KAAKnD,S,gCAUnB,OAAOmD,KAAKnD,Q,4BAnCZ,OAAOmD,KAAKqM,S,qCAOZ,OAAOrM,KAAKwM,kB,oCAOZ,OAAOxM,KAAKyM,mB,KCxDb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWhL,OAAS,GACDgL,EAAWE,MAAM,KACzB/D,SAAQ,SAACgE,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIpR,EAAIoR,EAAOpR,GAAKqR,EAAKrR,GAAK,EAC/BgR,EAAShR,M,6PCVvBsR,E,WAqBF,WAAarQ,I,4FAAO,SAChBmD,KAAKqM,OAASxP,E,wDAdOsQ,GACrB,OAAKA,EAGE7Q,OAAO8Q,OAAOF,EAAkBG,qBAAsBF,GAFlDD,EAAkBG,yB,mCAsB7B,OAAOrN,KAAKqM,S,iCAUZ,OAAO5F,OAAOzG,KAAKqM,W,iCAGNlL,GACb,OAAQA,aAAe+L,KAAwBA,EAAkBI,mBAAmBnM,K,qCAGlEA,GAClB,OAAOA,aAAe+L,EAAoB/L,EAAM+L,EAAkBI,mBAAmBnM,O,KAO7F+L,EAAkBK,KAAO,IAAIL,EAAkB,QAC/CA,EAAkBM,GAAK,IAAIN,EAAkB,MAC7CA,EAAkBO,IAAM,IAAIP,EAAkB,OAO9CA,EAAkBG,qBAAuB,CACrCK,QAASR,EAAkBM,GAC3BG,IAAKT,EAAkBO,IACvBG,KAAMV,EAAkBK,KACxBrN,UAAWgN,EAAkBM,IAGlBN,Q,8YC5ETW,EAAkB,SAACC,EAASd,EAAOC,GAIrC,IAHA,IAAMc,EAAU,GACZC,EAAOhB,EAEJgB,EAAOf,GACVc,EAAQ3H,KAAK4H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ3H,KAAK4H,GAEND,GAGLE,EAAkB,SAACC,EAAcrR,GAOnC,IANA,IAAIsR,EAAU,EACVC,EAAWF,EAAavM,OAAS,EACjC0M,SACA7M,SAGG2M,GAAWC,GAAU,CAIxB,GAAIvR,IAFJ2E,EAAQ0M,EADRG,EAASF,EAAUnK,KAAKsK,OAAOF,EAAWD,GAAW,KAGlCnB,OAASnQ,EAAQ2E,EAAMyL,IACtC,OAAOzL,EACA3E,GAAS2E,EAAMyL,IACtBkB,EAAUE,EAAS,EACZxR,EAAQ2E,EAAMwL,QACrBoB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,
YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,M,wHCzBF,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAI1G,OAAOC,SAAQ,SAACyC,GAChBmE,EAAOtJ,KAAKmF,EAAMI,SAASxP,SAE/BqT,EAAI3G,OAAOC,SAAQ,SAACyC,IAC6B,IAAzCmE,EAAOvJ,QAAQoF,EAAMI,SAASxP,OAC9BsT,EAAOrJ,KAAKmF,EAAMI,SAASxP,SAG5BsT,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD,wDAAxBC,EAAwB,uDAAblB,EAAMC,MACtFrD,EAAS,GACTZ,EAAO,GACPmF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAchU,KAClCoU,EAAoBF,EAAclU,KAClCA,EAAUgU,EAAchU,KAAxB,IAAgCkU,EAAclU,KAC9CqU,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CAqFpB,OAlFAN,EAActH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,OAAiB6T,IACpDU,EAAUvU,KAAUgU,EAAchU,KAAlC,IAA0CuU,EAAUvU,MAExDwP,EAAOvF,KAAKsK,MAEhBL,EAAcxH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,MAC9B6T,IACDU,EAAUvU,KAAUkU,EAAclU,KAAlC,IAA0CuU,EAAUvU,KACpDwP,EAAOvF,KAAKsK,IAGhB/E,EAAOvF,KAAKsK,MAKpBhE,EAAmBmD,EAAIc,aAAa,SAAC/U,GACjC,IAAIgV,GAAW,EACXC,SACJnE,EAAmBoD,EAAIa,aAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAActH,OAAOC,SAAQ,SAACyC,GAC1BwF,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAKnP,IACnCoV,EAAQV,GAAmB/E,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAKnP,GAClCsQ,eAAgBX,EAAM2F,gBAAgBtV,OAG9CyU,EAAcxH,OAAOC,SAAQ,SAACyC,IAC+B,IAAnDiF,EAAiBrK,QAAQoF,EAAMI,SAASxP,OAAgB6T,GAC1De,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAK+F,IAEvCE,EAAQT,GAAmBhF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAK+F,GAClC5E,eAAgBX,EAAM2F,gBAAgBJ,OAI9C,IAIMK,EAAYC,GAAgBJ,EAAQV,IACpCe,EAAYD,GAAgBJ,EAAQT,IAC1C,GAAIL,EAAmBiB,EAAWE,GALb,kBAAMxB,EAAIyB,kBACV,kBAAMxB,EAAIwB,iBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBR,EAAMjI,SAAQ,SAAC0I,EAASC,GACpBF,EAAS5F,EAAO8F,GAAKtV,MAAQqV,KAE7BZ,GAAY7B,EAAMC,QAAUiB,EAC5BlF,EAAK8F,GAAeU,GAGpBxG,EAAK3E,KAAKmL,GACVX,GAAW,EACXC,EAAcjV,QAEf,IAAKqU,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMW,EAAW,GACb5I,EAAMwH,EAActH,OAAOlH,OAAS
,EACxCoP,EAAMjI,SAAQ,SAAC0I,EAASC,GAEhBF,EAAS5F,EAAO8F,GAAKtV,MADrBsV,GAAO9I,EACsB6I,EAGA,QAGrCZ,GAAW,EACXC,EAAcjV,EACdmP,EAAK3E,KAAKmL,UAKf,IAAI3T,GAAUmN,EAAMY,EAAQ,CAAExP,SCjHzC,SAASuV,EAAW1O,EAAGO,GACnB,IAAMoO,EAAKA,GAAG3O,EACR4O,EAAKA,GAAGrO,EACd,OAAIoO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB,uDAAXL,EAIrC,OAHII,EAAInQ,OAAS,GArBrB,SAASqQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKjO,KAAKsK,OAAO4D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACNzW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EAC3ByW,EAAOzW,GAAKwW,EAAQxW,GAKxB,IAHA,IAAIoH,EAAIiP,EACJ1O,EAAI4O,EAAM,EAELvW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EACvBoH,EAAImP,GACJC,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GACEA,EAAI2O,GACXE,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,GACE+O,EAAOM,EAAOrP,GAAIqP,EAAO9O,KAAO,GACvC6O,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,IAELoP,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GAqBbsG,CAAMiI,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAInQ,OAAS,EAAGoQ,GAE1BD,E,0gBChCX,SAASQ,EAAqBC,EAAUC,GACpC,IAAMC,EAAmD,SAAvChM,OAAO+L,GAAc9Q,cAA2B,OAAS,MAC3E,OA9CJ,SAAoBgR,EAAUC,GAC1B,IAAIC,SAEJ,OAAQF,GACR,KAAKlU,EAAeC,WACpB,KAAKL,EAAiBE,SAEdsU,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAAJ,OAAUP,EAAIO,GAEd,SAACP,EAAGO,GAAJ,OAAUA,EAAIP,GAE5B,MACJ,QAEQ4P,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,EAAI,GAAK,GAGd,SAACP,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,GAAK,EAAI,GAKhC,OAAOqP,EAYAC,CAAUN,EAAS3G,KAAM6G,GAUpC,SAASK,EAAW/H,EAAMhC,GACtB,IAAMgK,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAlI,EAAKjC,SAAQ,SAACoK,GACV,IAAMC,EAAWD,EAAMnK,GACnBgK,EAAQK,IAAID,GACZF,EAAYF,EAAQtW,IAAI0W,IAAW,GAAG/M,KAAK8M,IAE3CD,EAAY7M,KAAK,CAAC+M,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYtR,OAAS,OAI5CsR,EAYX,SAASK,EAAoBC,EAAcC,EAAcC,GACrD,IAAMpO,EAAM,CACRqO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,QAAO,SAACC,EAAK5F,EAAM6F,GAE5B,OADAD,EAAI5F,GAAQuF,EAAa,GAAGpL,KAAI,SAAA+K,GAAA,OAASA,EAAM
O,EAAmBI,GAAK3R,UAChE0R,IACRvO,GAEIA,EAUX,SAASyO,EAAmB/I,EAAMY,EAAQoI,GAMtC,IALA,IAAIC,SACAC,SACA1B,SACA3W,EAAImY,EAAepS,OAAS,EAEzB/F,GAAK,EAAGA,IACXoY,EAAYD,EAAenY,GAAG,GAC9BqY,EAAWF,EAAenY,GAAG,IAC7B2W,EAAW2B,GAAcvI,EAAQqI,MVrFf,mBU4FHC,EAEXpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAU0Q,EAASjR,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAC1DmI,EAAQ4J,GAAW,WAC1B,IAAMhB,EAAcH,EAAU/H,EAAMwH,EAASrQ,OACvCiS,EAAYF,EAASA,EAAStS,OAAS,GACvC6R,EAAeS,EAASG,MAAM,EAAGH,EAAStS,OAAS,GACnD8R,EAAqBD,EAAarL,KAAI,SAAAkM,GAAA,OAAKH,GAAcvI,EAAQ0I,MAEvEpB,EAAYnK,SAAQ,SAACyK,GACjBA,EAAanN,KAAKkN,EAAmBC,EAAcC,EAAcC,OAGrE5B,EAAUoB,GAAa,SAACjQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4Q,EAAUnY,EAAGqB,MAIxB0N,EAAKpJ,OAAS,EACdsR,EAAYnK,SAAQ,SAACoK,GACjBnI,EAAK3E,KAAL,MAAA2E,EAAA,EAAamI,EAAM,QAnBG,GAqBvB,WACH,IAAMnB,EAASO,EAAoBC,EAAU0B,GAE7CpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAUwO,EAAO/O,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAH5D,IAiBf,I,EAAMoS,GAAsB,SAAtBA,EAAuBC,EAAYxJ,EAAMY,EAAQoI,GACnD,GAA0B,IAAtBQ,EAAW5S,OAAgB,OAAOoJ,EAEtC,IAAMyJ,EAAYD,EAAW,GACvBpM,EAAM,IAAI6K,IAEhBjI,EAAK4I,QAAO,SAACC,EAAKa,GACd,IAAMC,EAAOD,EAAQD,EAAUtS,OAM/B,OALI0R,EAAIR,IAAIsB,GACRd,EAAInX,IAAIiY,GAAMtO,KAAKqO,GAEnBb,EAAIP,IAAIqB,EAAM,CAACD,IAEZb,IACRzL,GAdmE,2BAgBtE,YAAuBA,EAAvB,+CAA4B,wBAAlBhL,EAAkB,KAAbgE,EAAa,KAClBwT,EAAOL,EAAoBC,EAAWH,MAAM,GAAIjT,EAAKwK,EAAQoI,GACnE5L,EAAIkL,IAAIlW,EAAKwX,GACT3L,MAAMqB,QAAQsK,IACdb,EAAkBa,EAAMhJ,EAAQoI,IApB8B,6EAwBtE,OAAO5L,GA2CJ,SAASyM,GAAUC,EAASd,GAAgB,IACzCpI,EAAiBkJ,EAAjBlJ,OAAQZ,EAAS8J,EAAT9J,KAGd,GAA8B,KAD9BgJ,EAAiBA,EAAee,QAAO,SAAAC,GAAA,QAAab,GAAcvI,EAAQoJ,EAAQ,QAC/DpT,OAAnB,CAEA,IAAIqT,EAAkBjB,EAAekB,WAAU,SAAAF,GAAA,OAA0B,OAAfA,EAAQ,MAClEC,GAAuC,IAArBA,EAAyBA,EAAkBjB,EAAepS,OAE5E,IAAMuT,EAAyBnB,EAAeK,MAAM,EAAGY,GACjDG,EAAsBpB,EAAeK,MAAMY,GAEjDlB,EAAkB/I,EAAMY,EAAQuJ,GAChCnK,EA5CJ,SAAyBA,EAAMY,EAAQoI,EAAgBQ,GAQnD,GAA8B,KAP9BR,EAAiBA,EAAee,QAAO,SAACM,GACpC,OAAkB,OAAdA,EAAO,KACPb,EAAWnO,KAAKgP,EAAO,KAChB,OAIIzT,OAAgB,OAAOoJ,EAE1CwJ,EAAaA,EAAWpM,KAAI,SAAAlM,GAAA,OAAKiY,GAAcv
I,EAAQ1P,MAEvD,IAAMoZ,EAAiBf,GAAoBC,EAAYxJ,EAAMY,EAAQoI,GACrE,OAAOhJ,EAAK5C,KAAI,SAACmN,GAIb,IAHA,IAAI1Z,EAAI,EACJ2Z,EAAUF,GAENrM,MAAMqB,QAAQkL,IAClBA,EAAUA,EAAQ9Y,IAAI6Y,EAAIf,EAAW3Y,KAAKsG,QAG9C,OAAOqT,EAAQ/M,WAuBZgN,CAAezK,EAAMY,EAAQwJ,EAAqBD,EAAuB/M,KAAI,SAAAiN,GAAA,OAAUA,EAAO,OAErGP,EAAQY,KAAO1K,EAAK5C,KAAI,SAAAmN,GAAA,OAAOA,EAAII,SACnCb,EAAQ9J,KAAOA,GCjPZ,SAAS4K,GAAa1K,EAAY0B,EAAYiJ,EAAe7B,EAAgBlN,GAKhFA,EAAUvK,OAAO8Q,OAAO,GAJL,CACfyI,QAAQ,EACRC,YAAY,GAEwBjP,GAExC,IAAMkP,EAAS,CACXpK,OAAQ,GACRZ,KAAM,GACN0K,KAAM,IAEJI,EAAShP,EAAQgP,OACjBG,EAAajC,GAAkBA,EAAepS,OAAS,EAEvDsU,EAAa,GAiDnB,GA/CgBL,EAAc/I,MAAM,KAE5B/D,SAAQ,SAACoN,GACb,IAAK,IAAIta,EAAI,EAAGA,EAAIqP,EAAWtJ,OAAQ/F,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAW+Z,EAAS,CAClCD,EAAW7P,KAAK6E,EAAWrP,IAC3B,UAMZqa,EAAWnN,SAAQ,SAACyC,GAEhBwK,EAAOpK,OAAOvF,KAAKmF,EAAMI,aAGzBkK,GACAE,EAAOpK,OAAOvF,KAAK,CACfjK,KAAM,MACNyP,KAAM,eAIdc,EAAmBC,GAAY,SAAC/Q,GAC5Bma,EAAOhL,KAAK3E,KAAK,IACjB,IAAM+P,EAAYJ,EAAOhL,KAAKpJ,OAAS,EAEvCsU,EAAWnN,SAAQ,SAACyC,EAAOuF,GACvBiF,EAAOhL,KAAKoL,GAAWrF,EAFf,GAE6BvF,EAAM0F,aAAalG,KAAKnP,MAE7Dia,IACAE,EAAOhL,KAAKoL,GAAWF,EAAWtU,QAAU/F,GAGhDma,EAAON,KAAKrP,KAAKxK,GAIboa,GAAcD,EAAOhL,KAAKoL,GAAW/P,KAAKxK,MAI9Coa,GACApB,GAASmB,EAAQhC,GAGjBlN,EAAQiP,WAAY,CACpB,IAAMM,EAAUpN,mB,sHAAAA,CAASA,MAAM+M,EAAOpK,OAAOhK,UAASwG,KAAI,iBAAM,MAChE4N,EAAOhL,KAAKjC,SAAQ,SAACiI,GACjBA,EAAMjI,SAAQ,SAACiC,EAAMnP,GACjBwa,EAAQxa,GAAGwK,KAAK2E,SAGxBgL,EAAOhL,KAAOqL,EAGlB,OAAOL,EC1EJ,SAASM,GAAYxG,EAAKC,GAC7B,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAiBX,SAAS2E,EAAkBC,EAAIvL,EAAWwL,GACtCnK,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACPD,GAAW9L,EAAK3E,KAAK2K,GACzBuF,EAA
UQ,IAAY,MASlC,OAjCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA2BtCwa,EAAkB7G,EAAK2G,GAAuB,GAC9CE,EAAkB9G,EAAK2G,GAAuB,GAEvC,IAAI5Y,GAAUmN,EAAMY,EAAQ,CAAExP,S,sPC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS2X,GAAkBlF,GACvB,OAAOA,EAAIgD,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAShD,SAAS+J,GAAKnF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMkO,EAAiBF,GAAkBlF,GAIzC,OAHiBoF,EAAevV,OACZuV,EAAevD,QAAO,SAACC,EAAKuD,GAAN,OAAevD,EAAMuD,IAAM,GAC/CjK,EAAkBK,KAG5C,OAAOL,EAAkBK,KAU7B,SAAS6J,GAAKtF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMqO,EAAWJ,GAAInF,GACfnJ,EAAMmJ,EAAInQ,QAAU,EAC1B,OAAQ+G,OAAO4O,MAAMD,IAAaA,aAAoBnK,EAC7CA,EAAkBK,KAAO8J,EAAW1O,EAEjD,OAAOuE,EAAkBK,KAgG7B,IAAMgK,YACDrY,GAAM+X,IADL,KAED9X,GAAMiY,IAFL,KAGDhY,IAzFL,SAAc0S,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAKyT,IAAL,MAAAzT,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA+EvB,KAIDlO,IAzEL,SAAcyS,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAK0T,IAAL,MAAA1T,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA8DvB,KAKDjO,IAzDL,SAAgBwS,GACZ,OAAOA,EAAI,MAmDT,KAMDvS,IA/CL,SAAeuS,GACX,OAAOA,EAAIA,EAAInQ,OAAS,MAwCtB,KAODnC,IArCL,SAAgBsS,GACZ,OAAIzH,EAAQyH,GACDA,EAAInQ,OAERuL,EAAkBK,QA0BvB,KAQD9N,IAbL,SAAcqS,GACV,OAAO9N,KAAK2T,KAbhB,SAAmB7F,GACf,IAAI8F,EAAOR,GAAItF,GACf,OAAOsF,GAAItF,EAAI3J,KAAI,SAAA0P,GAAA,gBAAQA,EAAMD,EAAS,OAWzBE,CAAShG,OAIxB,GAWAiG,GAAqB7Y,G,0PCzCnB0J,GAjGFoP,G,WACF,aAAe,Y,4FAAA,SACXhY,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK4I,MAAMyK,IAAI,aAAc4E,IAE7B3b,OAAO4b,QAAQX,IAAQzO,SAAQ,SAAC3L,GAC5B,EAAKyL,MAAMyK,IAAIlW,EAAI,GAAIA,EAAI,O,oDAc/B,IAAK,UAAOwE,OACR,OAAO3B,KAAK4I,MAAMnM,IAAI,cAG1B,IAAI0b,EAAUA,UAAVA,8BAEJ,GAAuB,mBAAZA,EACPnY,KAAK4I,MAAMyK,IAAI,aAAc8E,OAC1B,CAEH,GADAA,
EAAU1R,OAAO0R,IAC6B,IAA1C7b,OAAO0J,KAAKuR,IAAQpR,QAAQgS,GAG5B,MAAM,IAAI1H,MAAJ,WAAqB0H,EAArB,0BAFNnY,KAAK4I,MAAMyK,IAAI,aAAckE,GAAOY,IAK5C,OAAOnY,O,+BAmCD7D,EAAMgc,GAAS,WACrB,GAAuB,mBAAZA,EACP,MAAM,IAAI1H,MAAM,gCAMpB,OAHAtU,EAAOsK,OAAOtK,GACd6D,KAAK4I,MAAMyK,IAAIlX,EAAMgc,GAEd,WAAQ,EAAKC,aAAajc,M,mCAGvBA,GACN6D,KAAK4I,MAAMwK,IAAIjX,IACf6D,KAAK4I,MAAMyP,OAAOlc,K,8BAIjBA,GACL,OAAIA,aAAgBmc,SACTnc,EAEJ6D,KAAK4I,MAAMnM,IAAIN,O,KAgBfoc,IARO,QAHd3P,GAAQ,QAIJA,GAAQ,IAAIoP,IAETpP,I,+YC5Cf,SAAS4P,GAASC,EAAWtN,EAAUuN,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWtN,GAC7B,IAAMsE,EAAS,GAEToJ,EADaJ,EAAUrI,gBACCvE,eAY9B,OAVAvP,OAAO4b,QAAQW,GAAY/P,SAAQ,YAAW,IAAT3L,EAAS,WACtCgO,GAAYA,EAASxJ,QACU,IAA3BwJ,EAAShF,QAAQhJ,IACjBsS,EAAOrJ,KAAKjJ,GAGhBsS,EAAOrJ,KAAKjJ,MAIbsS,EAyCWqJ,CAAYL,EAAWtN,GACnC4N,EAhCV,SAAwBN,GAA0B,IAAfC,EAAe,uDAAJ,GACpC3C,EAAS,GAETiD,EADaP,EAAUrI,gBACD5E,aACtByM,EAAaM,GAAaU,iBAchC,OAZA3c,OAAO0J,KAAKgT,GAAUlQ,SAAQ,SAACoQ,GACU,iBAA1BR,EAASQ,KAChBR,EAASQ,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYb,GAAac,QAAQX,EAASQ,IAC5CE,EACArD,EAAOmD,GAAeE,GAEtBrD,EAAOmD,GAAejB,EACtBS,EAASQ,GAAenB,OAGzBhC,EAcYuD,CAAcb,EAAWC,GACtCzN,EAAawN,EAAUrI,gBACvBmJ,EAAgBtO,EAAWI,YAC3BmO,EAASvO,EAAW9O,KACpBsd,EAAe,GACfC,EAAa,GACb/N,EAAS,GACToH,EAAU,GACVhI,EAAO,GACT4O,SAGJrd,OAAO4b,QAAQqB,GAAezQ,SAAQ,YAAkB,cAAhB3L,EAAgB,KAAXN,EAAW,KACpD,IAAgC,IAA5B+b,EAAUzS,QAAQhJ,IAAe4b,EAAW5b,GAG5C,OAFAwO,EAAOvF,KAAKqD,EAAQ,GAAI5M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKlN,EAAUC,QACX+a,EAAWtT,KAAKjJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAarT,KAAKjJ,OAK9B,IAAIyc,EAAW,EACflN,EAAmB+L,EAAU9H,aAAa,SAAC/U,GACvC,IAAIie,EAAO,GACXJ,EAAa3Q,SAAQ,SAACgR,GAClBD,EAAUA,EAAV,IAAkBN,EAAcO,GAAG7I,aAAalG,KAAKnP,WAEnCsE,IAAlB6S,EAAQ8G,IACR9G,EAAQ8G,GAAQD,EAChB7O,EAAK3E,KAAK,IACVqT,EAAa3Q,SAAQ,SAACgR,GAClB/O,EAAK6O,GAAUE,GAAKP,EAAcO,GAAG7I,aAAalG,KAAKnP,MAE3D8d,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAK6O,GAAUE,GAAK,CAACP,EAAcO,GAAG7I,aAAalG,KAAKnP,OAE5Dge,GAAY,GAEZF,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAKgI,EAAQ8G,IAAOC,GAAG1T,KAAKmT,EAAcO,GAAG7I,aAAalG,KAAKnP,UAM3E,IA
AIme,EAAc,GACdC,EAAgB,kBAAMvB,EAAUnH,gBAcpC,OAbAvG,EAAKjC,SAAQ,SAACwM,GACV,IAAMvE,EAAQuE,EACdoE,EAAW5Q,SAAQ,SAACgR,GAChB/I,EAAM+I,GAAKf,EAAWe,GAAGxE,EAAIwE,GAAIE,EAAeD,SAGpDpB,GACAA,EAAkBsB,wBAClBN,EAAehB,GAGfgB,EAAe,IAAI/b,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMqd,IAEhDG,EC9HJ,SAASO,GAAmBrK,EAAKC,GACpC,IAIMqK,EAAkB7K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACe,EAAWE,GACf,IAAI+I,GAAc,EASlB,OARAD,EAAgBrR,SAAQ,SAACkL,GAGjBoG,IAFAjJ,EAAU6C,GAAWqG,gBACrBhJ,EAAU2C,GAAWqG,gBAAiBD,MAMvCA,GCjBR,SAASE,GAAOzK,EAAKC,GACxB,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAgBX,SAAS2E,EAAmBC,EAAIvL,GAC5BqB,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACX/L,EAAK3E,KAAK2K,GACVuF,EAAUQ,IAAY,MASlC,OAhCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA0BtCwa,EAAkB9G,EAAK2G,GACvBG,EAAkB7G,EAAK2G,GAEhB,IAAI7Y,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASoe,GAAeC,EAAYC,EAAY1K,GACnD,OAAOH,EAAa4K,EAAYC,EAAY1K,GAAU,EAAOhB,EAAME,WAGhE,SAASyL,GAAgBF,EAAYC,EAAY1K,GACpD,OAAOH,EAAa6K,EAAYD,EAAYzK,GAAU,EAAOhB,EAAMG,Y,8PCFlDyL,G,WAUjB,WAAaxe,EAAM4O,EAAMY,EAAQvJ,I,4FAAQ,SACrCpC,KAAK7D,KAAOA,EACZ6D,KAAK2L,OAASA,EACd3L,KAAKoC,OAASA,EACdpC,KAAK+K,KAAO/K,KAAK4a,UAAU7P,G,6CAUpBA,GAAM,WACb,OAAOA,EAAK5C,KAAI,SAAA+K,GAAA,OAAS,EAAK9Q,OAAOuE,MAAMuM,EAAO,CAAEnT,OAAQ,EAAK4L,OAAO5L,gB,+PCX3D8a,G,WAQjB,WAAa5J,EAActE,I,4FAAY,SACnC3M,KAAKiR,aAAeA,EACpBjR,KAAK2M,WAAaA,E,4CAclB,MAAM,IAAI8D,MAAM,yB,+BAUhB,OAAOzQ,KAAKiR,aAAatF,S,6BAUzB,OAAO3L,KAAKiR,aAAa9U,O,6BAUzB,OAAO6D,KAAKiR,aAAatF,OAAOC,O,gCAUhC,OAAO5L,KAAKiR,aAAatF,OAAOmP,U,oCAUhC,OAAO9a,KAAKiR,aAAatF,OAAOoP,c,oCAUhC,OAAO/a,KAAKiR,a
AAatF,OAAOqP,aAAehb,KAAKiR,aAAatF,OAAOxP,O,6BASpE,WACE4O,EAAO,GAIb,OAHA2B,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjCmP,EAAK3E,KAAK,EAAK6K,aAAalG,KAAKnP,OAE9BmP,I,sCAUP,MAAM,IAAI0F,MAAM,0B,gCA9FhB,MAAM,IAAIA,MAAM,yB,8BAyIhB,MAvCgB,CACZwK,QAAS,GACTC,SAAUlb,KACVgU,UAHY,SAGF7X,GAEN,OADA6D,KAAKib,QAAQ9e,KAAOA,EACb6D,MAEX2L,OAPY,SAOLA,GAEH,OADA3L,KAAKib,QAAQtP,OAASA,EACf3L,MAEX+K,KAXY,SAWPA,GAED,OADA/K,KAAKib,QAAQlQ,KAAOA,EACb/K,MAEXiR,aAfY,SAeCA,GAET,OADAjR,KAAKib,QAAQhK,aAAeA,EACrBjR,MAEX2M,WAnBY,SAmBDA,GAEP,OADA3M,KAAKib,QAAQtO,WAAaA,EACnB3M,MAEXmb,MAvBY,WAwBR,IAAIlK,EAAe,KACnB,GAAIjR,KAAKib,QAAQhK,wBAAwB0J,GACrC1J,EAAejR,KAAKib,QAAQhK,iBACzB,KAAIjR,KAAKib,QAAQtP,SAAU3L,KAAKib,QAAQlQ,KAO3C,MAAM,IAAI0F,MAAM,4BANhBQ,EAAe,IAAI0J,GAAa3a,KAAKib,QAAQ9e,KACzB6D,KAAKib,QAAQlQ,KACb/K,KAAKib,QAAQtP,OACb3L,KAAKkb,SAAS9Y,UAKtC,OAAO,IAAIpC,KAAKkb,SAASjK,EAAcjR,KAAKib,QAAQtO,kB,+PCjK/CyO,G,stBAYb,OAHKpb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,4CAUZ,MAAM,IAAI5K,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAjCmB8P,I,0PCElBU,G,stBAYb,OAHKvb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,6BAUZ,OAAOrb,KAAKiR,aAAatF,OAAO6P,O,iCAUhC,OAAOxb,KAAKiR,aAAatF,OAAOwN,UAAYpB,K,qCAShC,IACJ0D,EAAiBzb,KAAKiR,aAAatF,OAAnC8P,aACR,OAAOA,aAAwBnD,SAAWmD,EAAe5Q,I,4CAUzD,MAAM,IAAI4F,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAhEiB8P,I,0PCLhBa,G,yKAQb,MAAM,IAAIjL,MAAM,2B,+PCJHkL,G,mtBAQVxa,GAQH,OALK+L,EAAkB0O,UAAUza,GAGpB+L,EAAkB2O,eAAe1a,GAFjCsF,OAAOtF,GAAK2a,W,GAZcJ,I,0PCC1BK,G,utBASb,OAAO3d,EAAiBC,c,4CAUL,WACbwb,EAAO,IAAImC,IACXC,EAAS,GAUf,OAPAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAGb+I,K,gCAIP,OAAO,IAAIN,O,GAnCsBP,I,0PCApBe,G,mtBAoBVhb,E,GAAiB,IAAVpB,EAAU,EAAVA,OACN6D,SAKJ,GAHK5D,KAAKoc,OACNpc,KAAKoc,KAAO,IAAItc,EAAkBC,IAEjCmN,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIhB,EAAaH,KAAKoc,KAAK3T,cAActH,GACzCyC,EAASzD,EAAaA,EAAWoK,UAAY2C,EAAkBM,GAInE,OAAO5J,M,GAhC6B8X,I,0PCEvBW,G,YAQjB,WAAapL,EAActE,I,4FAAY,
e,iKAAA,wDAC7BsE,EAActE,IADe,OAGnC,EAAK2P,eAAiB,KAHa,E,wXAahB,WACbzC,EAAO,IAAImC,IACXC,EAAS,GAYf,OARAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAIb+I,I,qDAWP,GAAIjc,KAAKsc,eACL,OAAOtc,KAAKsc,eAUhB,IAPA,IAAMC,EAAavc,KAAK+K,OAAO+J,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAAoB8E,MAAK,SAAChP,EAAGO,GAAJ,OAAUP,EAAIO,KACjGiZ,EAAQD,EAAW5a,OACrB8a,EAAU/T,OAAOgU,kBACjBC,SACAC,SACAC,EAAiB,EAEZjhB,EAAI,EAAGA,EAAI4gB,EAAO5gB,IACvB+gB,EAAYJ,EAAW3gB,EAAI,IAC3BghB,EAAYL,EAAW3gB,MAEL+gB,IAIlBF,EAAUzY,KAAKyT,IAAIgF,EAASG,EAAYL,EAAW3gB,EAAI,IACvDihB,KAQJ,OALKA,IACDJ,EAAU,MAEdzc,KAAKsc,eAAiBG,EAEfzc,KAAKsc,iB,+BAUZ,OAAOtc,KAAKiR,aAAatF,OAAO5L,S,sCAUnB,WACPgL,EAAO,GACP+R,EAAa9c,KAAKD,SAaxB,OAXA2M,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAErC,GAAIsR,EAAkB0O,UAAU1I,KAAY4J,GAAcpU,OAAOrH,SAAS6R,GAAS,CAE/E,IAAM6J,EAAc7P,EAAkB2O,eAAe3I,IAAUA,EAC/DnI,EAAK3E,KAAK2W,QAEVhS,EAAK3E,KAAKtG,EAAkBwG,SAAS4M,EAAO4J,OAG7C/R,K,gCAIP,OAAO,IAAIoR,O,GAjHmBf,I,0PCHjB4B,G,mtBAQV7b,GAEHA,EAAMsF,OAAOtF,GACb,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAK7ByC,EAASsJ,EAAkB2O,eAAe1a,OALP,CACnC,IAAI8b,EAAU9b,EAAIoH,MALR,2DAMV3E,EAASqZ,EAAavU,OAAOwU,WAAWD,EAAQ,IAAvC,IAA8CvU,OAAOwU,WAAWD,EAAQ,IAC9D/P,EAAkBM,GAIzC,OAAO5J,M,GApB2B8X,I,0PCArByB,G,muBASb,IAAMC,EAAUpd,KAAKiR,aAAatF,OAAO0R,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQzb,OAAS,M,6BAU7C,OAAO3B,KAAKiR,aAAatF,OAAO0R,Q,gCAIhC,OAAO,IAAIL,O,GAxBiB5B,I,0PCAfkC,G,mtBAQVnc,GACH,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIC,EAAY8b,WAAW/b,EAAK,IAChCyC,EAAS8E,OAAO4O,MAAMlW,GAAa8L,EAAkBM,GAAKpM,EAI9D,OAAOwC,M,GAjB+B8X,I,0PCGzB6B,G,utBASb,OAAO/e,EAAeC,a,4CAUH,WACfgZ,EAAM/O,OAAOgU,kBACbhF,EAAMhP,OAAO8U,kBAiBjB,OAdA9Q,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GACjCsX,aAAiBhG,IAIjBgG,EAAQuE,IACRA,EAAMvE,GAENA,EAAQwE,IACRA,EAAMxE,OAIP,CAACuE,EAAKC,M,gCAIb,OAAO,IAAI4F,O,GA1CqB/B,I,0PCNlCkC,G,WACF,c,4FAAc,SACVzd,KAAK0d,WAAa,IAAI1K,
I,qDAGR8H,EAAS6C,GAEvB,OADA3d,KAAK0d,WAAWrK,IAAIyH,EAAS6C,GACtB3d,O,0BAGP4L,GACA,OAAO5L,KAAK0d,WAAWtK,IAAIxH,K,0BAG3BA,GACA,OAAO5L,KAAK0d,WAAWjhB,IAAImP,O,KAI7BgS,GAAwB,SAAChV,GAC3BA,EACiBiV,kBAAkBzf,EAAiBC,YAAa0d,IAChD8B,kBAAkBzf,EAAiBE,SAAU+d,IAC7CwB,kBAAkBzf,EAAiBG,OAAQ4e,IAC3CU,kBAAkBrf,EAAeC,WAAY8e,KAanDO,GAVQ,WACnB,IAAIlV,EAAQ,KAMZ,OAAOA,IAJHA,EAAQ,IAAI6U,GACZG,GAAsBhV,GACfA,GALQ,GCgChB,SAASmV,GAAaC,EAAYrS,EAAQsS,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQtc,SACrBsc,EAAUtS,EAAOxD,KAAI,SAAA6B,GAAA,OAAQA,EAAK7N,SAGtC8hB,EAAQnV,SAAQ,SAACqV,EAAQviB,GACrBsiB,EAAWC,GAAUviB,KAGlB+P,EAAOxD,KAAI,SAAA6B,GAAA,OAnEtB,SAAyBe,EAAMY,GAG3B,OAFAZ,EAAOA,GAAQ,GAEX+S,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALV,MAK0B5B,EAAKpJ,OAAS,IAC9BwZ,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WANV,MAM0B5B,EAAKpJ,OAAS,IAC9BwZ,QAgDSkD,CAAgBL,EAAWE,EAAWlU,EAAK7N,OAAQ6N,MC3ElE,QACX8S,WAAY/e,EAAWI,M,0PCANmgB,G,WACjB,WAAY1S,I,4FAAM,SACd5L,KAAKue,MAAQ3S,E,6CAQb,MAAM,IAAI6E,MAAM,qC,2BAJhB,OAAOzQ,KAAKue,U,KCThBC,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBC,GACvB,OAAO,IAAIxG,SAAS,IAAK,WAAawG,EAAQ3W,KAAI,SAAShM,EAAMP,GAC/D,OAAOmjB,KAAKC,UAAU7iB,GAAQ,OAASP,EAAI,OAC1CqH,KAAK,KAAO,KAWjB,SAASgc,GAAaC,GACpB,IAAIC,EAAY7iB,OAAOY,OAAO,MAC1B4hB,EAAU,GAUd,OARAI,EAAKpW,SAAQ,SAASwM,GACpB,IAAK,IAAI8J,KAAU9J,EACX8J,KAAUD,GACdL,EAAQ1Y,KAAK+Y,EAAUC,GAAUA,MAKhCN,EAGT,SAASjf,GAAIhD,EAAOwiB,GAClB,IAAI1hB,EAAId,EAAQ,GAAI8E,EAAShE,EAAEgE,OAC/B,OAAOA,EAAS0d,EAAQ,IAAIrW,MAAMqW,EAAQ1d,EAAS,GAAGsB,KAAK,GAAKtF,EAAIA,EAStE,SAAS2hB,GAAW3f,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAK4f,cACbC,EAAU7f,EAAK8f,gBACfC,EAAU/f,EAAKggB,gBACfC,EAAejgB,EAAKkgB,qBACxB,OAAOvI,MAAM3X,GAAQ,iBAXHwE,EAYDxE,EAAKmgB,kBAXR,EAAI,IAAMjgB,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAKogB,cAAgB,EAAG,GAAK,IAAMlgB,GAAIF,EAAKqgB,aAAc,IAC1GJ,EAAe,IAAM/f,
GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAAM7f,GAAI+f,EAAc,GAAK,IACnHF,EAAU,IAAM7f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAChFF,GAAWhd,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IACjE,IAGO,gBAASS,GACtB,IAAIC,EAAW,IAAI9f,OAAO,KAAQ6f,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAU/f,EAAM+T,GACvB,IAIIvX,EAJAoiB,EAAO,GACPoB,EAAIhgB,EAAKqB,OACT4e,EAAI,EACJljB,EAAI,EAEJmjB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAASpa,IACP,GAAIma,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAI5iB,EAAUK,EAAPykB,EAAIH,EACX,GAAIjgB,EAAK8f,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAKhgB,EAAK8f,WAAWG,KAAO7B,IAASpe,EAAK8f,aAAaG,KAAO7B,KAI3E,OAHK9iB,EAAI2kB,IAAMD,EAAGE,GAAM,GACdvkB,EAAIqE,EAAK8f,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9CxkB,IAAM2iB,KAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,GACpEjgB,EAAK8T,MAAMsM,EAAI,EAAG9kB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOggB,EAAID,GAAG,CACZ,IAAKrkB,EAAIqE,EAAK8f,WAAWxkB,EAAI2kB,QAAU5B,GAAS8B,GAAM,OACjD,GAAIxkB,IAAM2iB,GAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,OACtE,GAAItkB,IAAMkkB,EAAW,SAC1B,OAAO7f,EAAK8T,MAAMsM,EAAG9kB,GAIvB,OAAO4kB,GAAM,EAAMlgB,EAAK8T,MAAMsM,EAAGJ,GAGnC,IA7BIhgB,EAAK8f,WAAWE,EAAI,KAAO3B,MAAW2B,EACtChgB,EAAK8f,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjCxjB,EAAIuJ,OAAaoY,IAAK,CAE5B,IADA,IAAInJ,EAAM,GACHxY,IAAM0hB,IAAO1hB,IAAM2hB,IAAKnJ,EAAIlP,KAAKtJ,GAAIA,EAAIuJ,IAC5CgO,GAA4B,OAAtBiB,EAAMjB,EAAEiB,EAAKjY,OACvB6hB,EAAK9Y,KAAKkP,GAGZ,OAAO4J,EAGT,SAASyB,EAAczB,EAAMJ,GAC3B,OAAOI,EAAK/W,KAAI,SAASmN,GACvB,OAAOwJ,EAAQ3W,KAAI,SAASiX,GAC1B,OAAOwB,EAAYtL,EAAI8J,OACtBnc,KAAKgd,MAkBZ,SAASY,EAAUvL,GACjB,OAAOA,EAAInN,IAAIyY,GAAa3d,KAAKgd,GAGnC,SAASW,EAAY/jB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiB+C,KAAO0f,GAAWziB,GACnCqjB,EAASY,KAAKjkB,GAAS,IAAM,IAAOA,EAAM0D,QAAQ,KAAM,MAAU,IAClE1D,EAGR,MAAO,CACL8J,MA5FF,SAAerG,EAAM+T,GACnB,IAAI0M,EAASjC,EAASI,EAAOmB,EAAU/f,GAAM,SAASgV,EAAK1Z,GACzD,GAAImlB,EAAS,OAAOA,EAAQzL,EAAK1Z,EAAI,GACrCkjB,EAAUxJ,EAAKyL,EAAU1M,EAtD/B,SAAyByK,EAASzK,GAChC,IAAI/W,EAASuhB,GAAgBC,GAC7B,OAAO,SAASxJ,EAAK
1Z,GACnB,OAAOyY,EAAE/W,EAAOgY,GAAM1Z,EAAGkjB,IAmDMkC,CAAgB1L,EAAKjB,GAAKwK,GAAgBvJ,MAGzE,OADA4J,EAAKJ,QAAUA,GAAW,GACnBI,GAuFPmB,UAAWA,EACXtgB,OA5BF,SAAgBmf,EAAMJ,GAEpB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrC,CAACJ,EAAQ3W,IAAIyY,GAAa3d,KAAKgd,IAAYgB,OAAON,EAAczB,EAAMJ,IAAU7b,KAAK,OA2B5Fie,WAxBF,SAAoBhC,EAAMJ,GAExB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrCyB,EAAczB,EAAMJ,GAAS7b,KAAK,OAuBzCke,WApBF,SAAoBjC,GAClB,OAAOA,EAAK/W,IAAI0Y,GAAW5d,KAAK,SC1IhCme,GAAMC,GAAI,KCAVC,IDEkBF,GAAIza,MACAya,GAAIf,UACPe,GAAIrhB,OACAqhB,GAAIF,WACJE,GAAID,WCNrBE,GAAI,OAEQC,GAAI3a,MACA2a,GAAIjB,UACPiB,GAAIvhB,OACAuhB,GAAIJ,WACJI,GAAIH,WC8ChBI,OAnCf,SAAgBzP,EAAKnG,EAAQ9E,GACzB,IAAKmC,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAEpB,IAGM+Q,EAAe7V,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QACzD0K,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,GAGuB7a,GAE3C,IAAMiY,EAAU,GACV1Y,EAAOub,EAAY7C,GAErBb,EAAUuD,EACV3a,EAAQ6a,iBAGRzD,EAAUnM,EAAItK,OAAO,EAAG,GAAG,IAG/B,IAAMoa,EAAY3D,EAAQtK,QAAO,SAACC,EAAKiO,EAAGjmB,GAAT,OAC7BU,OAAO8Q,OAAOwG,G,EAAYhY,G,EAAJimB,K,EAAtB,I,sGACD,IAUH,OARA/P,EAAIhJ,SAAQ,SAACD,GACT,IAAM0C,EAAQ,GAKd,OAJAiW,EAAa1Y,SAAQ,SAACgZ,GAClB,IAAMC,EAAYH,EAAUE,GAC5BvW,EAAMnF,KAAKyC,EAAOkZ,OAEf3b,eAAQmF,MAEZ,CAACiW,EAAc1C,IChBXkD,OAXf,SAAiB7X,EAAKwB,EAAQ9E,GAK1BA,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,EAChBO,eAAgB,KAEuBpb,GAE3C,IAAMwa,EAAMa,GAAMrb,EAAQob,gBAC1B,OAAOV,GAAOF,EAAIhB,UAAUlW,GAAMwB,EAAQ9E,I,0PC5BzBsb,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJpkB,EAAWE,U,0WAGb8M,EAAMY,EAAQ9E,GAClB,OAAOmb,GAAOjX,EAAMY,EAAQ9E,O,GANYyX,ICqDjC8D,OA7Bf,SAAmBtQ,EAAKnG,GACpB,IAAK3C,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAGpB,IAAM0N,EAAS,GACXviB,EAAI,EACJymB,SACEvD,EAAU,GACV1Y,EAAOub,EAAY7C,GACnBwD,EAAmB3W,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QAgB7D,OAdA2V,EAAIhJ,SAAQ,SAACkB,GACT,IAAMnB,EAAS,GACfyZ,EAAiBxZ,SAAQ,SAAC2Y,GAClBA,KAActD,EACdkE,EAAiBlE,EAAOsD,IAExBtD,EAAOsD,GAAc7lB,IACrBymB,EAAiBzmB,EAAI,GAEzBiN,EAAOwZ,GAAkBrY,EAAKyX,MAElCrb,eAAQyC,MAGL,CAACvM,OAAO0J,KAAKmY,GAASW,I,0PClDZyD,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJxkB,EAAWC,Y,0WAGb+M,E
AAMY,EAAQ9E,GAClB,OAAOub,GAASrX,EAAMY,EAAQ9E,O,GANKyX,I,0PCAtBkE,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJzkB,EAAWG,U,0WAGb6M,EAAMY,EAAQ9E,GAClB,OAAO0a,GAAOxW,EAAMY,EAAQ9E,O,GANWyX,ICmBhCmE,OAXf,SAAe1X,EAAMY,EAAQ9E,GACzB,IAAM6b,EAAa,CAAEN,YAAUJ,UAAQT,WACjCzE,EAAahS,EAAiBC,GAEpC,IAAK+R,EACD,MAAM,IAAIrM,MAAM,mCAGpB,OAAOiS,EAAW5F,GAAY/R,EAAMY,EAAQ9E,I,0PChB3B8b,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJ5kB,EAAWI,O,0WAGb4M,EAAMY,EAAQ9E,GAClB,OAAO1I,GAAK4M,EAAMY,EAAQ9E,O,GANayX,I,6PCEzCsE,G,WACF,c,4FAAc,SACV5iB,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK0iB,WAAW1iB,KAAK6iB,yB,2DAIrB,MAAO,CACH,IAAIV,GACJ,IAAIK,GACJ,IAAID,GACJ,IAAII,M,mCASgB,WAAjBD,EAAiB,uDAAJ,GAEpB,OADAA,EAAW5Z,SAAQ,SAAAga,GAAA,OAAa,EAAKla,MAAMyK,IAAIyP,EAAUlX,KAAMkX,MACxD9iB,KAAK4I,Q,+BAQPka,GACL,OAAIA,aAAqBxE,IACrBte,KAAK4I,MAAMyK,IAAIyP,EAAUlX,KAAMkX,GACxB9iB,MAEJ,O,iCASA8iB,GAEP,OADA9iB,KAAK4I,MAAMyP,OAAOyK,EAAUlX,MACrB5L,O,0BAGP7D,GACA,OAAI6D,KAAK4I,MAAMwK,IAAIjX,GACR6D,KAAK4I,MAAMnM,IAAIN,GAEnB,S,KAeA4mB,GAVS,WACpB,IAAIna,EAAQ,KAMZ,OAAOA,IAHHA,EAAQ,IAAIga,IAJI,G,ioBCjDxB,SAASI,GAAsBna,EAAQqI,EAAe+R,EAASrnB,GAC3D,IAAMsnB,EAAO,GADiD,uBAG9D,YAA2Bra,EAAOqP,UAAlC,+CAA6C,yBAAjC/a,EAAiC,KAA5BoO,EAA4B,KACzC2X,EAAK3X,EAAMpP,QAAU,IAAI6P,EAAMkF,EAAc/T,GAAKvB,GAAIqnB,EAAQ9lB,GAAKvB,GAAI2P,IAJb,6EAM9D,OAAO2X,EAGJ,SAAS9R,GAAiBvI,GAC7B,IAAMqa,EAAO,GAEb,IAAK,IAAM/lB,KAAO0L,EACdqa,EAAK/lB,GAAO,IAAI6O,EAAMnD,EAAO1L,GAAK+O,eAAgBrD,EAAO1L,GAAK8O,SAAU9O,GAE5E,OAAO+lB,EAGJ,IAAMC,GAAe,SAAC,EAA6BC,EAAmBC,GAAmB,cAAlE1W,EAAkE,KAAtDiJ,EAAsD,KACxF0N,EAAS1N,EAAcjU,OAASiU,EAAc/I,MAAM,KAAO,GAC3D0W,EAAkBH,EAAkB/X,YACpCmY,EAAYF,EAAOnb,KAAI,SAAAsb,GAAA,OfIxB,SAAoCxS,EAActE,GAAY,IACzDhB,EAAWsF,EAAXtF,OAER,OAAImS,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACAnN,aAAaA,GACbtE,WAAWA,GACXwO,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACAnN,aAAaA,GACbtE,WAAWA,GACXwO,QenBkBuI,CAA2BH,EAAgBE,GAAMxS,aAActE,MAClG,OAAO1B,EAAWC,gBAAgBsY,EAAWH,IAGpCM,GAA2B,SAACC,EAAOC,GAAuC,IACzC,EADa1W,EAA4B,uDAAnB,GAAI2W,EAAe,aAC/ED,IAActV,EA
AeI,SAC7BiV,EAAMG,YAAYpiB,OAAS,GAC3B,EAAAiiB,EAAMG,aAAY3d,KAAlB,WAA0B0d,KAE1BF,EAAMG,YAAY3d,KAAK,CACnB4d,GAAIH,EACJI,KAAM9W,EACN+W,SAAUJ,KAITK,GAA4B,SAACC,EAAUC,GAAU,OAC1D,EAAAA,EAAMC,qBAAoBle,KAA1B,WAAkCge,EAASE,qBAA3C,UAAmEF,EAASL,gBAGnEQ,GAAqB,SAACH,EAAUR,EAAOC,GAAuC,IAA5B1W,EAA4B,uDAAnB,GAAI2W,EAAe,aACvFH,GAAyBC,EAAOC,EAAW1W,EAAQ2W,GACnDK,GAA0BC,EAAUR,IAGlCY,aACD3lB,EAAcC,OAAS,CACpB2lB,UAAW,CAAC,cACZC,SAAU,EAAC,GAAM,KAHnB,MAKD7lB,EAAcE,QAAU,CACrB0lB,UAAW,CAAC,oBACZC,SAAU,EAAC,GAAO,KAPpB,MASD7lB,EAAcG,IAAM,CACjBylB,UAAW,CAAC,aAAc,oBAC1BC,SAAU,EAAC,GAAM,KAXnB,IAeAC,GAAqB,SAAChY,EAAY/Q,EAAGgpB,GACvC,IAA2B,IAAvBA,GAA4BhpB,IAAOgpB,EAAoB,EAAI,CAC3D,IAAMC,EAAKlY,EAAWhL,OAAS,EAE/BgL,EAAWkY,GAASlY,EAAWkY,GAAIhY,MAAM,KAAK,GAA9C,IAAoDjR,OAEpD+Q,EAAWvG,KAAX,GAAmBxK,IAIdkpB,GAA2B,SAACnY,EAAYoY,EAAShoB,GAC1D,IAEMioB,EAAgB,GAChBC,EAAgB,GAJ6C,KAM9BT,GAAcznB,GAAM2nB,SANU,GAM5DQ,EAN4D,KAM9CC,EAN8C,KAanE,OALAzY,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMwpB,EAAgBL,EAAQnpB,GAC9BwpB,GAAiBF,GAAgBP,GAAmBK,EAAeppB,GAT5C,IAUtBwpB,GAAiBD,GAAgBR,GAAmBM,EAAerpB,GAT7C,MAWpB,CACH+Q,WAAYqY,EAAc/hB,KAAK,KAC/BoiB,iBAAkBJ,EAAchiB,KAAK,OAKhCqiB,GAA0B,SAAC3Y,EAAYoY,EAAShoB,EAAM0c,EAAcF,GAC7E,IAAIqL,EAAoB,GAClBW,EAAkB,GAClBC,EAAe,GAyBrB,OAvBA9Y,EAAmBC,GAAY,SAAC/Q,GAC5B,GAAImpB,EAAQnpB,GAAI,CACZ,IAAIie,EAAO,GAEP4L,EAAe,CAAEzf,KAAM,IAE3ByT,EAAa3Q,SAAQ,SAACgR,GAClB,IAAM/O,EAAOwO,EAAcO,GAAG7I,aAAalG,KAAKnP,GAChDie,EAAUA,EAAV,IAAkB9O,EAClB0a,EAAazf,KAAK8T,GAAK/O,UAGG7K,IAA1BqlB,EAAgB1L,KAChB0L,EAAgB1L,GAAQ,GACxB+K,EAAkB/K,IAAS,EAC3B2L,EAAa3L,GAAQ4L,GAGzBd,GAAmBY,EAAgB1L,GAAOje,EAAGgpB,EAAkB/K,IAC/D+K,EAAkB/K,GAAQje,MAI3B,CACH2pB,kBACAC,iBAKKE,GAAe,SAACC,EAAUC,EAAUzY,EAAQiX,EAAUyB,GAC/D,IAAI9L,EAAc,GACdC,EAAgB,kBAAMoK,EAAS9S,gBAC3BvU,EAASoQ,EAATpQ,KACF4P,EAAagZ,EAAShV,YACtB9H,EAAS8c,EAASG,uBAAuBjd,OACzCkd,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAChD8U,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAShD,OAAO8a,EAASlZ,GAPS,SAAAzK,GAAA,OAAS0jB,EAC9B5C,GAAqBna,EAAQkd,EAAqBC,EAAe9jB,GACjEA,EACA8X,EACAD,KAG0Chd,IAGrCkpB,
GAAqB,SAACrC,GAC/B,IAAM+B,EAAW/B,EAAMsC,OAAM,GACvB9C,EAAoBQ,EAAMkC,uBAShC,OARAH,EAASjP,eAAiB0M,EAAkBva,OAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK,KAG3EmgB,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KACnCia,EAAS1L,wBAAwBkM,wBAE1BR,GAGLS,GAAS,SAACtU,EAAK/G,EAAMsb,GAGvB,IAFA,IAAIlpB,EAAMkpB,EAAGvU,EAAK/G,EAAM,GAEfnP,EAAI,EAAG+M,EAAMmJ,EAAInQ,OAAQ/F,EAAI+M,EAAK/M,IACvCuB,EAASA,EAAT,IAAgBkpB,EAAGvU,EAAK/G,EAAMnP,GAElC,OAAOuB,GAGEmpB,GAAyB,SAAC1C,EAAO2C,GAA4B,IAAhBpZ,EAAgB,uDAAP,GAC3DqZ,EAAM,GACJ3C,EAAY1W,EAAO0W,WAAaxU,EAChCoX,EAAkBtZ,EAAOsZ,kBAAmB,EAC5CC,EAAcT,GAAmBrC,GACjC+C,EAAoBD,EAAYE,kBAKlCJ,EAHCD,EAAW5kB,OAGN4kB,EAAWpe,KAAI,SAAA0e,GAAA,OACbC,SACEjS,GAF0B4D,EAqCjCoO,GAnC2BE,UACpBC,EAAevO,EAAUmO,kBACzB/N,EAAavc,OAAO0J,KAAKyS,EAAUrI,gBAAgBvE,gBACpDiJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBM,EAAOpO,EAAWlX,OAClBulB,EAAUrO,EAAW1Q,KAAI,SAAAjM,GAAA,OAC3B8qB,EAAa9qB,GAAGgG,SACd8W,EAAW1c,OAAO0J,KAAKyS,EAAUrI,gBAAgB5E,cAClDsJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBQ,EAAc1O,EAAUrI,gBAAgB/E,YACxCN,EAAO8J,EAAQ9J,KACfkR,EAASjD,EAASrF,QAAO,SAACC,EAAKwT,GAEjC,OADAxT,EAAIwT,GAAKD,EAAYC,GAAGnL,SACjBrI,IACR,IACGyT,EAAY,GAElBP,EAAQ,SAAChV,EAAKwD,EAAKzB,GAAX,OAAmByB,EAAIxD,EAAI+B,KAC/BoT,GACAlc,EAAKjC,SAAQ,SAACwM,GACV,IAAMnY,EAAMipB,GAAOc,EAAS5R,EAAKwR,GACjCO,EAAUlqB,GAAO,KAIzB2pB,EAAQ,SAAChV,EAAKjJ,EAAQgL,GAAd,OAAsBhL,EAAOiJ,EAAI+B,IAAMwG,eACxCtP,EAAKpJ,OAAS,SAACkH,GAClB,IAAMye,GAAUL,GAAOI,EAAUjB,GAAOvN,EAAYhQ,EAAQie,IAE5D,OAAIL,EACOzN,EAASuO,OAAM,SAAAhc,GAAA,OAAS1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,IACxE1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,OAAO+b,EAErDA,GACP,kBAAM,GApCqB,IAAC7O,EAC5BqO,EACEjS,EACAmS,EACAnO,EAEAoO,EACAC,EAEAlO,EAEAmO,EACApc,EACAkR,EAIAoL,KAnBJ,CAAC,kBAAM,IAqDjB,OAVIxD,IAAcxU,EACEqX,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIe,OAAM,SAAAlB,GAAA,OAAMA,EAAGxd,QAAU,CACtE4e,WAAW,IAGCf,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIkB,MAAK,SAAArB,GAAA,OAAMA,EAAGxd,QAAU,CACrE4e,WAAW,KA+CVE,GAAuB,SAAChC,EAAUhZ,EAAYyX,EAAUwD,EAAchC,GAC/ED,EAAShV,YAAchE,EACvBgZ,EAAS1L,wBAAwBkM,wBACjC5B
,GACIH,EACAuB,EACApX,EAAeC,OACd,CAAErB,OAAQya,GACThC,IA+BGiC,GAAmB,SAACzD,EAAU0D,EAAW3a,EAAQ4a,GAC1D,IAAMC,EAAS5D,EAAS8B,MAAM/Y,EAAOsa,WACjCQ,EAAgBH,EAiBpB,OAhBI3a,EAAOpQ,OAAS8B,EAAcE,UAC9BkpB,EAAgBF,EAAUjT,QAAO,SAAAd,GAAA,OAA+C,IAAlC8T,EAAU3hB,QAAQ6N,OAIpEgU,EAAOtR,eAAiBuR,EAAchlB,KAAK,KAC3C+kB,EAAO/N,wBAAwBkM,wBAE/B5B,GACIH,EACA4D,EACAzZ,EAAeE,QACf,CAAEqZ,YAAW3a,SAAQ+a,gBAAiBD,GACtC,MAGGD,GAIEG,GAAmB,SAAC/D,EAAUgE,EAAcjb,EAAQ4a,GAAjC,OAC5BK,EAAajgB,KAAI,SAAAkgB,GAAA,OACbR,GAAiBzD,EAAUiE,EAAYlb,EAAQ4a,OAE1CO,GAAqB,SAAC7G,GAO/B,IALAA,EAAahY,EAAQ,GAAIgY,IACT7V,OACZ6V,EAAW7V,KAAOlN,EAAUE,YAG3B6iB,EAAW3G,QACZ,OAAQ2G,EAAW7V,MACnB,KAAKlN,EAAUC,QACX8iB,EAAW3G,QAAUtc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX6iB,EAAW3G,QAAU1c,EAAiBC,YAK9C,OAAOojB,GA6BE8G,GAA4B,SAAA5c,GAAA,OAAUA,EAAOxD,KAAI,SAACsZ,GAG3D,OA7B8B,SAACA,GAC/B,IAAM+G,EAA2B,CAAChqB,EAAeC,YAC3CgqB,EAAuB,CACzBrqB,EAAiBC,YACjBD,EAAiBG,OACjBH,EAAiBE,SACjBF,EAAiBsqB,KAEb9c,EAAwB6V,EAAxB7V,KAAMkP,EAAkB2G,EAAlB3G,QAAS3e,EAASslB,EAATtlB,KAEvB,OAAQyP,GACR,KAAKlN,EAAUE,UACX,IAA+C,IAA3C6pB,EAAqBtiB,QAAQ2U,GAC7B,MAAM,IAAIrK,MAAJ,qDAA+DqK,EAA/D,aAAmF3e,EAAnF,UAEV,MACJ,KAAKuC,EAAUC,QACX,IAAmD,IAA/C6pB,EAAyBriB,QAAQ2U,GACjC,MAAM,IAAIrK,MAAJ,mDAA6DqK,EAA7D,aAAiF3e,EAAjF,UAEV,MACJ,QACI,MAAM,IAAIsU,MAAJ,wCAAkD7E,EAAlD,aAAmEzP,EAAnE,WAMVwsB,CADAlH,EAAa6G,GAAmB7G,IAEzBA,MAeEmH,GAAa,SAACC,EAAU9d,EAAMY,EAAQ9E,GAC/C8E,EAAS4c,GAA0B5c,GACnC9E,EAAUvK,OAAO8Q,OAAO9Q,OAAO8Q,OAAO,GAAI0b,IAAgBjiB,GAC1D,IAAMic,EAAYC,GAAetmB,IAAIoK,EAAQiW,YAG7C,IAAKgG,EACD,MAAM,IAAIrS,MAAJ,mCAA6C5J,EAAQiW,WAArD,WAPiD,MAU3BgG,EAAU/B,QAAQhW,EAAMY,EAAQ9E,GAVL,UAUpDsX,EAVoD,KAU5CjN,EAV4C,MAZ/B,SAACvF,EAAQod,GACrCpd,EAAO7C,SAAQ,SAAC2Y,GACZ,IAAMuH,EAAcvH,EAAWwH,GAC/B,GAAKD,EAAL,CAEA,IAAMnV,EAAMkV,EAAW5iB,QAAQsb,EAAWtlB,MAC1C4sB,EAAWlV,GAAOmV,EAClBvH,EAAWtlB,KAAO6sB,SACXvH,EAAWwH,OAetBC,CAAiBvd,EAAQwS,GACzB,IAAMhT,EAAW4S,GAAa7M,EAAevF,EAAQwS,GAG/CgL,EAAYle,EAAWC,gBAAgBC,EAAUtE,EAAQ1K,MAC/D0sB,EAASO,mBAAqBD,EAG9BN,EAASlY,YAAcO,EAAcvP,QAAUuP,EAAc,GAAGvP,OAAzC,MAAuDuP,EAAc,GA
AGvP,OAAS,GAAM,GAG9G,IAAM0nB,EAAe,GACbxgB,EAAWsgB,EAAXtgB,OACFmd,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAC1Cgb,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAQtD,OAPAxE,EAAmBmc,EAASlY,aAAa,SAAC/U,GACtCytB,EAAaztB,GAAKonB,GAAqBna,EAAQkd,EAAqBC,EAAepqB,MAEvFutB,EAAUG,oBAAsBD,EAEhCR,EAASnS,eAAkB/K,EAAOxD,KAAI,SAAA2R,GAAA,OAAKA,EAAE3d,QAAO8G,OACpD4lB,EAASU,YAAc1iB,EAAQiW,aAAe/e,EAAWI,KAAO2M,EAAiBC,GAAQlE,EAAQiW,WAC1F+L,GAGE3U,GAAgB,SAACvI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOhK,SAAU/F,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHA,KAAMoP,EACNK,KAAMD,EAAO/P,GAAGkf,SAAWnP,EAAO/P,GAAGgQ,KACrC1J,MAAOtG,GAInB,OAAO,MA+BL4tB,GAAgC,SAAC3C,EAAWpO,GAC9C,IAAMgR,EAAchR,EAAUiR,iBAC1BC,EAAiB9C,EAerB,OAbA4C,EAAY3gB,SAAQ,SAAC8gB,GACjB,GAAKA,EAAL,CADgC,IAMjB,EANiB,EAhCF,SAACA,GACnC,IAAIC,EAAS,GACThG,SAEJ,OADAA,EAAY+F,EAAW5F,IAEvB,KAAKzV,EAAeC,OAChBqb,EAAS,CAACD,EAAW1F,UACrB,MACJ,KAAK3V,EAAeE,QAChBob,EAAS,CAACD,EAAW3F,KAAKiE,iBAC1B,MACJ,KAAK3Z,EAAeO,KAChB+a,EAAS,CAACD,EAAW1F,UACrB,MACJ,KAAK3V,EAAeG,QAChBmV,EAAY,UACZgG,EAAS,CAACD,EAAW3F,KAAK6F,cAAcjd,MAAM,KAAM+c,EAAW1F,UAC/D,MACJ,QACIL,EAAY,KAGhB,MAAO,CACHA,YACAgG,UAa8BE,CAAuBH,GAA7C/F,EALwB,EAKxBA,UAAWgG,EALa,EAKbA,OACnB,GAAIhG,EACA8F,GAAiB,EAAAA,GAAe9F,GAAf,WAA6BgG,GAA7B,QAAqC,CAClDpC,WAAW,UAKhBkC,GAWLK,GAAuB,SAAvBA,EAAwBvR,EAAWoO,GAA8C,IAAnC1Z,EAAmC,uDAA1B,GAAI8c,EAAsB,uDAAP,GACtEC,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEpD,GAAI1R,IAAcyR,EAAlB,CAIA,IAAME,GAAYD,EAAcxoB,SAA+C,IAAtCwoB,EAAchkB,QAAQsS,GAE/D2R,GAAa3R,EAAU4R,kBAAkBxD,EAAW1Z,GAEpD,IAAMmd,EAAW7R,EAAU8R,UAC3BD,EAASxhB,SAAQ,SAAC0hB,GACd,IAAMb,EAAiBH,GAA8B3C,EAAW2D,GAChER,EAAqBQ,EAAOb,EAAgBxc,EAAQ8c,QAI/CQ,GAAsB,SAAC7G,GAChC,KAAOA,EAAM8G,SAAW9G,EAAMG,YAAY4G,MAAK,SAAAzuB,GAAA,OAAKA,EAAE8nB,KAAOzV,EAAeG,YACxEkV,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEgH,GAAmB,SAAChH,GAC7B,KAAOA,EAAM8G,SACT9G,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEiH,GAAqB,SAACjH,GAC/B,IADoD,IAAdkH,EAAc,uDAAP,GACtClH,EAAM8G,SACTI,EAAK1kB,KAAKwd,GACVA,EAAQA,EAAM8G,QAElB,OAAOI,GAGEC,GAA2B,SAACC,EAAaC,EAAYC,EAAgB/d,GAC
9E,IAAI+W,SACA2C,SACIsE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bpe,EAAOoe,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB7d,EAAOse,WAC/BD,EAAY,CAAC,CACTtH,SAAU,KAEdA,EAAW,OACR,OACCwH,EAAkBpvB,OAAOqvB,OAAOR,EAAqBS,iBAC/B,IAAtBR,IACAM,EAAkBA,EAAgB5W,QAAO,SAAA5Y,GAAA,OAAKA,EAAEiR,OAAOme,WAAaD,MAGxE,IAAMQ,EAAmBH,EAAgB5W,QAlB5B,SAACgX,GAEd,OADe3e,EAAO4C,UAAa,kBAAM,IAC3B+b,EAAO3e,MAgBqChF,KAAI,SAAA4jB,GAAA,OAAUA,EAAO5e,OAAO+W,YAEhFiG,EAAgB,GAEtB,IAA0B,IAAtBiB,EAA6B,CAC7B,IAAMY,EAAwB1vB,OAAOqvB,OAAOR,EAAqBS,gBAEjEI,EAAsBljB,SAAQ,SAACmjB,GAC3B,IAAMC,EAAaD,EAAU9e,QACI,IAA7B+e,EAAWC,eAA2BD,EAAWH,SAAW5e,EAAO4e,QAC/DG,EAAWZ,WAAaD,IAC5BlB,EAAc/jB,KAAK6lB,EAAUrI,QAC7BM,EAAW8H,EAAsBlX,QAAO,SAAA5Y,GAAA,OAAKA,IAAM+vB,KAAW9jB,KAAI,SAAAjM,GAAA,OAAKA,EAAEiR,OAAO+W,aACvEviB,QAAU6pB,EAAUplB,KAAK,CAC9B8d,WACAkI,OAAQH,EAAUrI,MAClBkH,KAAMD,GAAmBoB,EAAUrI,aAOnDM,GAAW,MAAGjD,OAAH,qBAAiB4K,GAAjB,CAAmCb,KAAclW,QAAO,SAAA5Y,GAAA,OAAW,OAANA,KACxEsvB,EAAUplB,KAAK,CACX8d,WACAiG,wBAAmBA,EAAnB,GAAqChd,EAAOgd,eAAiB,OAIrE,IAAMkC,EAAYpB,EAAWrH,MAEvB0I,EAAahwB,OAAO8Q,OAAO,CAC7Bmf,kBAAmBvB,EACnBK,uBACDle,GAEGqf,EAAmBvB,EAAWwB,aAChClB,GAA+BiB,IAC/B3F,EAAYP,GAAuBkG,EAAkBtI,EAAU,CAC3DuC,gBAAiB8E,IAErBvB,GAAqBwC,EAAkB3F,EAAWyF,IAGtDd,EAAU1iB,SAAQ,SAAC4jB,GACf,IAAMC,EAAmBrG,GAAuB+F,EAAWK,EAAIxI,UACzD4G,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EA3HO,SAAC/F,EAAWiE,GACjC,IAAK,IAAIlvB,EAAI,EAAG+M,EAAMmiB,EAAKnpB,OAAQ/F,EAAI+M,EAAK/M,IAAK,CAC7C,IAAMgoB,EAAQkH,EAAKlvB,GACnBirB,EAAY2C,GAA8B3C,EAAWjD,GAEzD,OAAOiD,EAsHuBgG,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIN,OAAO/B,kBAAkBuC,EAAeN,QAE5CtC,GAAqBqC,EAAWM,EAAkBL,EAAY,CAC1DnC,cAAeuC,EAAIvC,cACnBD,mBAAoBqB,GAA+BiB,QAMtDO,GAA4B,SAAC5B,EAAsBF,EAAYC,GACxE,IAAM8B,EAAmB7B,EAAqB6B,iBAE9C,IAAK,IAAMjB,KAAUiB,EAAkB,CACnC,IACMd,EADYc,EAAiBjB,GACN5e,OACvBke,EAAsBH,EAAe/d,OAAOme,SAC5C2B,GAAwB/B,EAAeoB,WAAWW,uBACpD/B,EAAeoB,WAAWW,sBAAsBf,EAAYhB,EAAe/d,QAC/E,GAAI+e,EAAWZ,WAAaD,GAAuB4B,EAAuB,CACtE,IAAMC,EAAgBhB,EAAWhI,SACjC6G,GAAyBmC,EAAejC,EAAY,CAChDE,uBACAC,mBAAm
B,EACnBE,SAAUD,GACXa,MAKFiB,GAAqB,SAAChC,GAA6C,IAAvBhe,EAAuB,uDAAd,GAAIyW,EAAU,aACxEwJ,SACEC,EAAkBlgB,EAAOkgB,gBACzBnJ,EAAW/W,EAAO+W,SAClB/mB,EAASgQ,EAAO4e,OAAhB,IAA0B5e,EAAOme,SAGnC8B,EADAC,EACkBlC,EAAqBS,eAErBT,EAAqB6B,iBAG1B,OAAb9I,SACOkJ,EAAgBjwB,GAEvBiwB,EAAgBjwB,GAAO,CACnBymB,QACAzW,WAQCmgB,GAAyB,SAACxF,EAAWC,EAAWwF,GACzD,IAAMC,EAAsB1F,EAAUnU,QAAO,SAACC,EAAKrI,GAM/C,MAL+B,WAA3BA,EAAMkiB,YAAYtxB,KAClByX,EAAIxN,KAAJ,MAAAwN,EAAA,GAAYmU,EAAUjT,QAAO,SAAAd,GAAA,OAA0C,IAA7BA,EAAU0Z,OAAOniB,QACpDA,KAASgiB,GAChB3Z,EAAIxN,KAAKmF,GAENqI,IACR,IACH,OAAO5K,MAAMC,KAAK,IAAI+S,IAAIwR,IAAsBrlB,KAAI,SAAAoD,GAAA,OAASA,EAAMuQ,WAU1D3P,GAAwB,SAACZ,EAAO1O,GACzC,OAAI0O,EAAMkQ,aACClQ,EAAMkQ,cAANlQ,CAAqB1O,GAEzBA,G,0PC/II8wB,G,WA/hBX,c,4FAAwB,SACpB,IAAIC,SAEJ5tB,KAAK0qB,QAAU,KACf1qB,KAAK+jB,YAAc,GACnB/jB,KAAKskB,oBAAsB,GAC3BtkB,KAAKuqB,UAAY,GANG,2BAARV,EAAQ,qBAARA,EAAQ,gBAQE,IAAlBA,EAAOloB,SAAkBisB,EAAS/D,EAAO,cAAe8D,GAExD3tB,KAAK0W,eAAiBkX,EAAOlX,eAC7B1W,KAAK2Q,YAAcid,EAAOjd,YAC1B3Q,KAAKupB,YAAcqE,EAAOrE,YAC1BvpB,KAAK0qB,QAAUkD,EACf5tB,KAAKopB,mBAAqBppB,KAAK0qB,QAAQtB,mBACvCppB,KAAK6tB,gBAAkBvjB,IACvBtK,KAAKia,wBAAwBkM,0BAE7ByC,GAAUA,cAAC5oB,MAAX,OAAoB6pB,IACpB7pB,KAAK6tB,gBAAkB7tB,KAAKopB,mBAAmBjtB,KAC/C6D,KAAKia,wBAAwBkM,wBAC7BnmB,KAAK8tB,sBAAwB,CACzBlC,eAAgB,GAChBoB,iBAAkB,K,+CA0B1B,OAAOhtB,KAAKoQ,gBAAgBvH,OAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEyP,c,gCAY9C,OAAO3L,KAAK6tB,kB,sCAIZ,OAAO7tB,KAAK+tB,c,8CAMZ,OAFA/tB,KAAK+tB,YAAc5K,GAAa,CAACnjB,KAAK2Q,YAAa3Q,KAAK0W,gBACnD1W,KAAK8lB,uBAAwB9lB,KAAK6tB,iBAChC7tB,O,6CAIP,OAAOA,KAAKopB,qB,2BAiCV4E,EAAUje,GACZ,OAAOH,EAAa5P,KAAMguB,EAAUje,K,kCAuB3Bie,GACT,OAAOpe,EAAa5P,KAAMguB,EAAU9T,GAAkBla,KAAMguB,IAAW,K,4BAqBpEC,GACH,OAAO3T,GAAMta,KAAMiuB,K,iCAoBXC,GACR,OAAO7X,GAAWrW,KAAMkuB,K,6BAkDpBtI,EAAUzY,GACd,IAAMghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAMf,OAJAta,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,IAC/BpQ,KAAOoQ,EAAOpQ,MAAQoxB,EAAUpxB,KDkChB,SAACqnB,EAAUwB,EAAUgC,EAAcwG,GAC9D,IAAIC,EAAe,GAEbtxB,EAAS6qB,EAAT7qB,KAEAirB,EAAS5D,EAAS8B,MAAMkI,EAAY3G,WACpC6G,
EAAmB5I,GACrBsC,EACApC,EACAgC,EACAxD,EACAU,IAEEL,EAAYD,GAAcznB,GAAM0nB,UAItC,OAFAkD,GAAqBK,EAAQsG,EAAiB7J,EAAU,IAAKL,EAAUwD,EAAchC,GAEjFnB,EAAU9iB,OAAS,GACnB0sB,EAAejK,EAAS8B,MAAMkI,EAAY3G,WAC1CE,GAAqB0G,EAAcC,EAAiB7J,EAAU,IAAKL,EAAUwD,EAAchC,GACpF,CAACoC,EAAQqG,IAGbrG,ECtDIuG,CACHvuB,KACA4lB,EACAzY,EAJgB,CAAEsa,UAAWta,EAAOsa,c,gCA4BxC,OAAQznB,KAAK2Q,YAAYhP,SAAW3B,KAAK0W,eAAe/U,S,8BAUnC,IAAlB8lB,IAAkB,yDACf9B,EAAW,IAAI3lB,KAAKytB,YAAYztB,MAMtC,OALIynB,EACA9B,EAAS6I,UAAUxuB,MAEnB2lB,EAAS6I,UAAU,MAEhB7I,I,8BA8CFmC,EAAW3a,GAChB,IAAMghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAEfta,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,GACtC,IAAMogB,EAAcvtB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKunB,GACtBxwB,EAASoQ,EAATpQ,KACFywB,EAAsBF,GAAuBxF,EAAWC,EAAWwF,GAErE9U,SAEA1b,IAAS8B,EAAcG,IASvByZ,EAAY,CARUoP,GAAiB7nB,KAAMwtB,EAAqB,CAC9DzwB,KAAM8B,EAAcC,OACpB2oB,UAAWta,EAAOsa,WACnBM,GACkBF,GAAiB7nB,KAAMwtB,EAAqB,CAC7DzwB,KAAM8B,EAAcE,QACpB0oB,UAAWta,EAAOsa,WACnBM,IAIHtP,EADsBoP,GAAiB7nB,KAAMwtB,EAAqBrgB,EAAQ4a,GAI9E,OAAOtP,I,wCAIP,OAAOzY,KAAKyuB,e,8CAWZ,OAPAzuB,KAAKyuB,aAAezuB,KAAK+tB,YAAYllB,OAAO8K,QAAO,SAACC,EAAK8a,EAAU9yB,GAK/D,OAJAgY,EAAI8a,EAASvyB,QAAU,CACnB+F,MAAOtG,EACP+yB,IAAKD,EAAS/iB,UAEXiI,IACR,IACI5T,O,gCAWPA,KAAK0qB,SAAW1qB,KAAK0qB,QAAQkE,YAAY5uB,MACzCA,KAAK0qB,QAAU,KACf1qB,KAAKuqB,UAAUzhB,SAAQ,SAAC0hB,GACpBA,EAAME,QAAU,QAEpB1qB,KAAKuqB,UAAY,K,kCA6BRC,GACT,IAAI3W,EAAM7T,KAAKuqB,UAAUtV,WAAU,SAAA4Z,GAAA,OAAWA,IAAYrE,MACjD,IAAT3W,GAAa7T,KAAKuqB,UAAU/iB,OAAOqM,EAAK,K,gCAQjCib,GACP9uB,KAAK0qB,SAAW1qB,KAAK0qB,QAAQkE,YAAY5uB,MACzCA,KAAK0qB,QAAUoE,EACfA,GAAUA,EAAOvE,UAAUnkB,KAAKpG,Q,kCA4BhC,OAAOA,KAAK0qB,U,oCA6BZ,OAAO1qB,KAAKuqB,Y,uCA4BZ,OAAOvqB,KAAK+jB,c,+CA4BZ,OAAO/jB,KAAKskB,wB,uwBCwSL1mB,G,YAtxBX,aAAsB,O,4FAAA,oCAANsJ,EAAM,qBAANA,EAAM,sB,iKAAA,2EACTA,KADS,OAGlB,EAAK6nB,eAAiB,GAHJ,E,0WAgFbloB,GAQLA,EAAUvK,OAAO8Q,OAAO,GAPL,CACf4hB,MAAO,MACP3sB,UAAW,KACX4sB,SAAS,EACTC,cAAc,EACdld,KAAM,IAE8BnL,GACxC,IAAMgC,EAAS7I,KAAK8lB,uBAAuBjd,OAErCsmB,EAAgBxZ,GAAY5Z,KAC9BiE,KACAA,KAAK8lB,uBAAuBjd,OAC5B7I
,KAAK2Q,YACL9J,EAAQqoB,aAAermB,EAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEC,UAAQ8G,OAASjD,KAAK0W,eAC/D7P,EAAQmL,KACR,CACI8D,WAA8B,WAAlBjP,EAAQmoB,MACpBnZ,SAAUhP,EAAQooB,UAI1B,IAAKpoB,EAAQxE,UACT,OAAO8sB,EAxBG,IA2BN9sB,EAAcwE,EAAdxE,UACA0I,EAAuBokB,EAAvBpkB,KAAMY,EAAiBwjB,EAAjBxjB,OAAQ8J,EAAS0Z,EAAT1Z,KAChB2Z,EAAazjB,EAAOxD,KAAK,SAAA9E,GAAA,OAAKA,EAAElH,QAEhCkzB,EADgB/yB,OAAO0J,KAAK3D,GACAsR,QAAO,SAACC,EAAK5F,GAC3C,IAAM6F,EAAMub,EAAWjpB,QAAQ6H,GAI/B,OAHa,IAAT6F,GACAD,EAAIxN,KAAK,CAACyN,EAAKxR,EAAU2L,KAEtB4F,IACR,IAgCH,MA9BsB,WAAlB/M,EAAQmoB,MACRK,EAAYvmB,SAAQ,SAACwmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBvkB,EAAKwkB,GAAMzmB,SAAQ,SAACoK,EAAOuc,GACvB1kB,EAAKwkB,GAAME,GAAYD,EAAMzzB,UACzBmE,EACAgT,EACAuC,EAAKga,GACL9jB,EAAO4jB,UAKnBxkB,EAAKjC,SAAQ,SAACoK,EAAOuc,GACjBJ,EAAYvmB,SAAQ,SAACwmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBpc,EAAMqc,GAAQC,EAAMzzB,UAChBmE,EACAgT,EAAMqc,GACN9Z,EAAKga,GACL9jB,EAAO4jB,UAMhBJ,I,gCASP,IAAMxiB,EAAa3M,KAAK2Q,YAClB+e,EAAM,GAER/iB,EAAWhL,QACMgL,EAAWE,MAAM,KAEzB/D,SAAQ,SAACuK,GAAQ,MACHA,EAAIxG,MAAM,KAAK1E,IAAIO,QADhB,UACjBsE,EADiB,KACVC,EADU,KAGtBA,OAAc/M,IAAR+M,EAAoBA,EAAMD,EAChC0iB,EAAItpB,KAAJ,MAAAspB,EAAA,GAAY1mB,MAAMiE,EAAMD,EAAQ,GAAG2iB,OAAOxnB,KAAI,SAAC2R,EAAGjG,GAAJ,OAAY7G,EAAQ6G,UAI1E,OAAO6b,I,8BA0BFE,GAAwD,IAA7ClX,EAA6C,uDAAlC,GAAIvL,EAA8B,uDAArB,CAAEsa,WAAW,GAC/CqC,EAAgBA,GAAG8F,EAAU3sB,OAC/B4mB,EAAS,CAAC7pB,KAAM4vB,EAAWlX,GACzBiB,EAAenB,gBAAWqR,GAgBhC,OAdAtF,GACIvkB,KACA2Z,EACApL,EAAeG,QACf,CAAEkhB,YAAW9F,gBAAe7Q,eAAgBV,GAAaU,kBACzDP,GAGAvL,EAAOsa,UACP9N,EAAa6U,UAAUxuB,MAEvB2Z,EAAa6U,UAAU,MAGpB7U,I,2BAsDL5F,GAA+C,IAA/B5G,EAA+B,uDAAtB,CAAEsa,WAAW,GAClCxE,EAAUjjB,KAAK+mB,QAAQ,CACzBiI,MAAO,MACPhd,KAAM+B,IAEJoK,EAAS8E,EAAQtX,OAAOxD,KAAI,SAAAoD,GAAA,OAASA,EAAMpP,QAC3C0zB,EAAe,CAAC1R,GAAQ8C,OAAOgC,EAAQlY,MAEvC+kB,EAAW,IAAI9vB,KAAKytB,YAAYoC,EAAc5M,EAAQtX,OAAQ,CAAEmR,WAAY,WAgBlF,OAdAyH,GACIvkB,KACA8vB,EACAvhB,EAAeO,KACf3B,EACA4G,GAGA5G,EAAOsa,UACPqI,EAAStB,UAAUxuB,MAEnB8vB,EAAStB,UAAU,MAGhBsB,I,gCAwBAlkB,EAAM/E,GACb+E,EAAOA,GAAQ5L,KAAKu
pB,YACpB1iB,EAAUvK,OAAO8Q,OAAO,GAAI,CAAE6U,eAAgB,KAAOpb,GAErD,IAAMgC,EAAS7I,KAAKoQ,gBAAgBvH,OAC9BknB,EAAUlnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAEnD,mBAC5B8e,EAAYD,EAAQ,GAAGpuB,OACzBsuB,SACAC,SACAC,SAEJ,GAAIvkB,IAAS7N,EAAWC,UAEpB,IADAiyB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIzM,EAAOsnB,GAAQh0B,QAAU4zB,EAAQI,GAAQD,GAEjDD,EAAe7pB,KAAKkP,QAErB,GAAI1J,IAAS7N,EAAWE,QAAS,CAEpC,IADAgyB,EAAiB,CAACpnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK4D,EAAQob,iBACpDiO,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIlP,KAAK2pB,EAAQI,GAAQD,IAE7BD,EAAe7pB,KAAKkP,EAAIrS,KAAK4D,EAAQob,iBAEzCgO,EAAiBA,EAAehtB,KAAK,UAClC,IAAI2I,IAAS7N,EAAWG,QAU3B,MAAM,IAAIuS,MAAJ,aAAuB7E,EAAvB,qBARN,IADAqkB,EAAiB,CAACpnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,WAC/B+zB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAM5a,EAAM,GACZ,IAAK6a,EAAS,EAAGA,EAAStnB,EAAOlH,OAAQwuB,IACrC7a,EAAIlP,KAAK2pB,EAAQI,GAAQD,IAE7BD,EAAe7pB,KAAKkP,IAM5B,OAAO2a,I,+BAGD1kB,GACN,IAAMyI,EAAYzI,EAAMpP,OACxB6D,KAAK0W,gBAAL,IAA2B1C,EAC3B,IAAMoP,EAAoBpjB,KAAKopB,mBACzBgH,EAAqBhN,EAAkBkG,oBACvCpY,EAAgB3F,EAAM2F,gBACtB+R,EAAU1X,EAAM0F,aAAalG,KAEnC,GAAKqY,EAAkB/X,YAAYE,EAAMpP,QAKlC,CACH,IAAM4M,EAAaqa,EAAkBva,OAAOoM,WAAU,SAAAob,GAAA,OAAaA,EAAUl0B,SAAW6X,KACxFjL,GAAc,IAAMqa,EAAkBva,OAAOE,GAAcwC,QAN3D6X,EAAkBva,OAAOzC,KAAKmF,GAC9B6kB,EAAmBtnB,SAAQ,SAACV,EAAKxM,GAC7BwM,EAAImD,EAAMpP,QAAU,IAAI6P,EAAMkF,EAActV,GAAIqnB,EAAQrnB,GAAI2P,MAapE,OALA6X,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KAEnC1L,KAAKia,wBAAwBkM,wBACtBnmB,O,wCAuCQ2L,EAAQ2kB,EAAYnjB,GAAQ,WAC3CxB,EAAS2c,GAAmB3c,GAC5BwB,EAAS7Q,OAAO8Q,OAAO,GAAI,CAAEqa,WAAW,EAAM8I,YAAY,GAASpjB,GAEnE,IAAM6Z,EAAehnB,KAAK4mB,kBACpB4J,EAAUF,EAAWlc,MAAM,EAAGkc,EAAW3uB,OAAS,GAClD8uB,EAAaH,EAAWA,EAAW3uB,OAAS,GAElD,GAAIqlB,EAAarb,EAAOxP,QAAUgR,EAAOojB,WACrC,MAAM,IAAI9f,MAAS9E,EAAOxP,KAApB,sCAGV,IAAMu0B,EAAkBF,EAAQroB,KAAI,SAACoD,GACjC,IAAMolB,EAAY3J,EAAazb,GAC/B,IAAKolB,EA
ED,MAAM,IAAIlgB,MAASlF,EAAb,gCAEV,OAAOolB,EAAUzuB,SAGfgkB,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAE1BmJ,EAAK1K,EAAM9V,gBAAgBvH,OAC3BgoB,EAAiBH,EAAgBvoB,KAAI,SAAA0L,GAAA,OAAO+c,EAAG/c,MAEjDkG,EAAc,GACdC,EAAgB,kBAAM,EAAK1I,gBAEzBwf,EAAiB,GACvBpkB,EAAmBwZ,EAAMvV,aAAa,SAAC/U,GACnC,IAAMm1B,EAAaF,EAAe1oB,KAAI,SAAAoD,GAAA,OAASA,EAAM0F,aAAalG,KAAKnP,MACvEk1B,EAAel1B,GAAK60B,kBAAcM,GAAd,QAA0Bn1B,EAAGoe,EAAeD,QAhCzB,MAkC3BgE,GAAa,CAAC+S,GAAiB,CAACnlB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC,WA6C3C,OAVA2a,EAAM8K,SAASzlB,GAEfgZ,GACIvkB,KACAkmB,EACA3X,EAAeK,QACf,CAAEzB,OAAQxB,EAAQ9C,OAAQ2nB,GAC1BC,GAGGvK,I,gCAWA8E,GAA2D,IAA9C7d,EAA8C,uDAArC,GAAI8jB,EAAiC,aAAjB3E,EAAiB,uDAAJ,GACxDe,EAAkBlgB,EAAOkgB,gBACzBhC,EAAsBle,EAAOme,SAC7B4F,EAAU/jB,EAAO+jB,QACjB7E,EAAYzB,GAAiB5qB,MAC7BmrB,EAAuBkB,EAAUyB,sBACjCtB,EAAmB/B,GAAoBzqB,MACvCirB,EAAa,CACfwB,aAAcD,EACd5I,MAAOyI,GAgBX,OAbA4E,GAAkB9D,GAAmBhC,EAAsBhe,EAAQnN,MACnE+qB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChF/uB,OAAO8Q,OAAO,CACV8jB,WACD/jB,IAEHkgB,GACAN,GAA0B5B,EAAsBF,EAAY,CACxD9d,SACAmf,eAIDtsB,O,yBAUPmxB,EAAWvkB,GACX,OAAQukB,GACR,I5CplBmB,c4CqlBfnxB,KAAK+uB,eAAe3oB,KAAKwG,GAG7B,OAAO5M,O,kCASEmxB,GACT,OAAQA,GACR,I5CnmBmB,c4ComBfnxB,KAAK+uB,eAAiB,GAI1B,OAAO/uB,O,wCAUQ6mB,EAAWqK,GAAS,WACflxB,KAAK+uB,eACXjmB,SAAQ,SAAAud,GAAA,OAAMA,EAAGtqB,KAAK,EAAM8qB,EAAWqK,Q,0BA8CpDE,EAAkBjkB,GACnB,IAAM6Z,EAAehnB,KAAK4mB,kBAE1B,IAAKI,EAAaoK,GACd,MAAM,IAAI3gB,MAAJ,SAAmB2gB,EAAnB,kBAGV,IAAMC,EAAelkB,EAAOhR,MAAWi1B,EAAlB,UAErB,GAAIpK,EAAaqK,GACb,MAAM,IAAI5gB,MAAJ,SAAmB4gB,EAAnB,mBAGV,IAb2B,E7CvnB5B,SAAgCC,EAAc3kB,EAAYQ,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAASwjB,EAAmCpkB,EAAnCokB,UAAWzjB,EAAwBX,EAAxBW,QAASd,EAAeG,EAAfH,MAAOC,EAAQE,EAARF,IAD2B,EAEhDqkB,EAAarV,SAFmC,SAE9DuV,EAF8D,KAExDC,EAFwD,KAIhE1jB,IACDf,EAAmB,IAAVA,KAAiBA,GAASA,EAAQwkB,GAASA,EAAOxkB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMwkB,GAAUA,EAAO,EAAKxkB,EAErDskB,IACAzjB,EAAU9J,KAAK0tB,KAAK1tB,KAAK2tB,IAAI1kB,EAAMD,GAASukB,IAGhDxjB,EAAUF,EAAgBC,EAASd,EAAOC,IAG1Cc,EAAQ,GAAKyjB,GACbzjB,EAAQpG,QAAQ6pB,GAEhBzjB,EAAQA,EAAQpM,OAAS,IA
AM8vB,GAC/B1jB,EAAQ3H,KAAKqrB,EAAO,GAIxB,IADA,IAAMvjB,EAAe,GACZtS,EAAI,EAAGA,EAAImS,EAAQpM,OAAS,EAAG/F,IACpCsS,EAAa9H,KAAK,CACd4G,MAAOe,EAAQnS,GACfqR,IAAKc,EAAQnS,EAAI,KAIzB,IAAMg2B,EAAa,GAYnB,OAXAllB,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMsX,EAAQoe,EAAargB,aAAalG,KAAKnP,GAC7C,GAAIsX,aAAiBhG,EACjB0kB,EAAWxrB,KAAK8M,OADpB,CAKA,IAAM1R,EAAQyM,EAAgBC,EAAcgF,GAC5C0e,EAAWxrB,KAAQ5E,EAAMwL,MAAzB,IAAkCxL,EAAMyL,SAGrC,CAAE2kB,aAAYvU,KAAMtP,G6C2lBM8jB,CADR7xB,KAAKoQ,gBAAgB/E,YAAY+lB,GACWpxB,KAAK2Q,YAAaxD,GAA3EykB,EAdmB,EAcnBA,WAAYvU,EAdO,EAcPA,KAEdyU,EAAW/T,GAAa,CAAC6T,GAAa,CACxC,CACIz1B,KAAMk1B,EACNzlB,KAAMlN,EAAUE,UAChBkc,QAAS1c,EAAiBG,OAC1B8e,SACA,CAACgU,IAAe,GAElBnL,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAWhC,OAVAvB,EAAM8K,SAASc,GAEfvN,GACIvkB,KACAkmB,EACA3X,EAAeM,IACd,CAAEuiB,mBAAkBjkB,SAAQkkB,gBAC5B,MAGEnL,I,qCA8BP,OAAO,IAAItoB,EAHEoC,KAAK+xB,UAAUh0B,EAAWC,WACxBgC,KAAKgyB,e,iCA+CZvY,EAAcL,EAAWjM,GACjC,IAAM6Z,EAAehnB,KAAK4mB,kBAE1BnN,EAAa3Q,SAAQ,SAACkL,GAClB,IAAKgT,EAAahT,GACd,MAAM,IAAIvD,MAAJ,SAAmBuD,EAAnB,mCAId,IAAMma,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAKf,OF5iBuB,SAACrD,EAAU3K,GAAiD,IAAnCL,EAAmC,uDAAvB,SAAAjY,GAAA,OAAOA,GAAKgM,EAAW,aAEnFsa,EACAta,EADAsa,UAEElO,EAAgB6K,EAAShU,gBAAgB/E,YAJwC,EASnFqa,GACAtB,EAAS8B,MAAMuB,GACfrO,EACAjM,EACAiX,GACA,sCAAIyF,EAAJ,qBAAIA,EAAJ,uBAAevE,GAAuBA,aAAIuE,EAA3B,QAAmCpQ,EAAcF,QAPhEgM,EAPmF,EAOnFA,gBACAC,EARmF,EAQnFA,aASEyM,EAAY,GAoBlB,OAnBA31B,OAAO0J,KAAKuf,GAAiBvT,OAAOlJ,SAAQ,SAACzF,GACzC,GAAIkiB,EAAgBliB,GAAI,CACpB,IAAM2kB,EAAS5D,EAAS8B,MAAMuB,GACxBmC,EAAapE,EAAaniB,GAChC2kB,EAAOrX,YAAc4U,EAAgBliB,GAAGJ,KAAK,KAC7C+kB,EAAO/N,wBAAwBkM,wBAI3BsB,GACAlD,GAAmBH,EAAU4D,EAAQzZ,EAAeC,OAAQrB,GAHtC,SAAAtE,GAAA,OAAU4Q,EAAa8N,OAAM,SAAAzN,GAAA,OAAKjR,EAAOiR,GAAGO,gBAAkBuP,EAAW5jB,KAAK8T,SAKxGkO,EAAOjE,YAAYiE,EAAOjE,YAAYpiB,OAAS,GAAGsiB,KAAOuB,EAAaniB,GAEtE4uB,EAAU7rB,KAAK4hB,OAKhBiK,EEugBIC,CAAgBlyB,KAAMyZ,EAAcL,EAF3CjM,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,M,sCAyCmB,IAA9CglB,EAA8C,uDAA/B,GAAIC,EAA2B,uDAAZ,GAAIjlB,EAAQ,aACnDghB,EAAY,CACdpxB,KAAM8B,EAAcC,OACpB2o
B,WAAW,GAET8F,EAAcvtB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKunB,GACxB8E,EAA0B,CAAC,CAACD,IAalC,OAXAjlB,EAAS7Q,OAAO8Q,OAAO,GAAI+gB,EAAWhhB,IACtCglB,EAAeA,EAAaxwB,OAASwwB,EAAe,CAAC,KAGxCrpB,SAAQ,SAACwpB,EAAU12B,GAC5By2B,EAAwBz2B,GAAK0xB,GAAuBA,GAADA,UAC3CgF,GADqB,GACRF,IACjBrK,EACAwF,MAGDpF,GAAiBnoB,KAAMqyB,EAAyBllB,EAAQ4a,M,kDApuBhC5a,GAC/B,OAAOD,EAAkBI,iBAAiBH,K,+BA7B1C,OAAOoL,K,iCAOP,OAAOwK,K,iCAOP,OAAOjF,O,GAnES6P,ICxCT1W,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKK,GAA0CF,GAA1CE,IAAKC,GAAqCH,GAArCG,IAAK6a,GAAgChb,GAAhCgb,MAAOC,GAAyBjb,GAAzBib,KAAMC,GAAmBlb,GAAnBkb,MAAYC,GAAOnb,GAAZob,ICyBjDC,GAAY,CACdC,QC2LmB,sCAAIC,EAAJ,qBAAIA,EAAJ,uBACnB,SAAClc,GAAqC,IAAjCzJ,EAAiC,uDAAxB,CAAEsa,WAAW,GACnBsL,EAAYnc,EACZoc,SACEvJ,EAAc,GA8BpB,OA5BAqJ,EAAWhqB,SAAQ,SAAC+a,GAChBkP,EAAYlP,EAAUkP,GACtBtJ,EAAYrjB,KAAZ,MAAAqjB,EAAA,EAAoBsJ,EAAUhP,cACzBiP,IACDA,EAAaD,MAIjBC,GAAcA,IAAeD,GAC7BC,EAAWC,UAIfF,EAAUzO,oBAAsB,GAChCC,GACI3N,EACAmc,EACAxkB,EAAeI,QACf,KACA8a,GAGAtc,EAAOsa,UACPsL,EAAUvE,UAAU5X,GAEpBmc,EAAUvE,UAAU,MAGjBuE,ID5NXG,ICyHe,sCAAIhsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGsc,IAAH,MAAAtc,EAAU1P,KDxH5CsgB,OC6BkB,sCAAItgB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4Q,OAAH,MAAA5Q,EAAa1P,KD5BlDisB,QC4DmB,sCAAIjsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGuc,QAAH,MAAAvc,EAAc1P,KD3DpDsR,QCmJmB,sCAAItR,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4B,QAAH,MAAA5B,EAAc1P,KDlJpDksB,kBE1B6B,sCAAIlsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGwc,kBAAH,MAAAxc,EAAwB1P,KF2BxE8K,KElBgB,sCAAI9K,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG5E,KAAH,MAAA4E,EAAW1P,KFmB9C0I,eACAyG,cACAgd,YGlCG,SAAsB7Y,EAAYC,GACrC,OAAO7K,EAAa4K,EAAYC,EAAYP,GAAkBM,EAAYC,IAAa,IHkCvFF,iBACAG,kBACA4Y,clC3BG,SAAwB9Y,EAAYC,EAAY1K,GACnD,OAAOuK,GAAMC,GAAcC,EAAYC,EAAY1K,GAAW2K,GAAeF,EAAYC,EAAY1K,KkC2BrGuK,SACA5N,sBAGE6mB,G,KAAcA,QACpBj3B,OAAO8Q,OAAOxP,GAAW,CACrBg1B,aACAY,QACAjlB,iBACAzO,oBACA/B,aACAc,gBACAqO,oBACAqmB,WACAjV,iBACAmV,iBACDC,GAEY91B","file":"datamodel.js","sourcesContent":["(function 
webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of 
value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a 
number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from 
'../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the 
subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import Field 
from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return 
numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of 
the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? 
new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from 
'../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends 
DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from './model/dataConverter';\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n *\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n 
converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n 
partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, 
options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === 
false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, 
union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * 
{ name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. 
It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n 
DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/package.json b/package.json index c723642..3133582 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "datamodel", "description": "Relational algebra compliant in-memory tabular data store", "homepage": "https://github.com/chartshq/datamodel", - "version": "2.2.1", + "version": "2.2.1-alpha", "license": "MIT", "main": "dist/datamodel.js", "keywords": [ diff --git a/src/export.js b/src/export.js index 422c3ab..7767443 100644 --- a/src/export.js +++ b/src/export.js @@ -13,7 +13,8 @@ import { leftOuterJoin, rightOuterJoin, fullOuterJoin, - union + union, + rowDiffsetIterator } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; @@ -38,7 +39,8 @@ const Operators = { leftOuterJoin, rightOuterJoin, fullOuterJoin, - union + union, + rowDiffsetIterator }; const version = pkg.version; diff --git a/src/fields/index.js b/src/fields/index.js index 71510c6..c62b24f 100644 --- a/src/fields/index.js +++ b/src/fields/index.js @@ -2,3 +2,4 @@ export { default as Dimension } from './dimension'; export { default as Measure } from './measure'; export { default as FieldParser } from './parsers/field-parser'; export { default as fieldRegistry } from './field-registry'; +export { columnMajor } from '../utils'; From bbc4f1dcb93c5e0ae0a7cc78b7442d50c1a7760e Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Mon, 14 Oct 2019 15:31:32 +0530 Subject: [PATCH 14/20] Field Registry --- src/helper.js | 22 ++++------------------ 1 file changed, 4 
insertions(+), 18 deletions(-) diff --git a/src/helper.js b/src/helper.js index 8562f16..af4b889 100644 --- a/src/helper.js +++ b/src/helper.js @@ -8,6 +8,7 @@ import { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import defaultConfig from './default-config'; import { converterStore } from './converter'; +import { fieldRegistry } from './fields'; import { extend2, detectDataFormat } from './utils'; /** @@ -374,27 +375,12 @@ export const sanitizeUnitSchema = (unitSchema) => { }; export const validateUnitSchema = (unitSchema) => { - const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS]; - const supportedDimSubTypes = [ - DimensionSubtype.CATEGORICAL, - DimensionSubtype.BINNED, - DimensionSubtype.TEMPORAL, - DimensionSubtype.GEO - ]; const { type, subtype, name } = unitSchema; - - switch (type) { - case FieldType.DIMENSION: - if (supportedDimSubTypes.indexOf(subtype) === -1) { - throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`); - } - break; - case FieldType.MEASURE: - if (supportedMeasureSubTypes.indexOf(subtype) === -1) { + if (type === FieldType.DIMENSION || type === FieldType.MEASURE) { + if (!fieldRegistry.has(subtype)) { throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`); } - break; - default: + } else { throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`); } }; From d4a932bea5cbe0da1f80101b56d6568230e4258b Mon Sep 17 00:00:00 2001 From: Ujjal Kumar Dutta Date: Tue, 15 Oct 2019 16:35:01 +0530 Subject: [PATCH 15/20] Review Comments --- dist/datamodel.js | 2 +- dist/datamodel.js.map | 2 +- src/converter/dataConverterStore.js | 11 ++++------- .../{autoCoverter.js => autoConverter.js} | 0 src/converter/defaultConverters/index.js | 4 ++++ src/fields/parsers/temporal-parser/index.js | 11 ----------- 6 files changed, 10 insertions(+), 20 
deletions(-) rename src/converter/defaultConverters/{autoCoverter.js => autoConverter.js} (100%) create mode 100644 src/converter/defaultConverters/index.js diff --git a/dist/datamodel.js b/dist/datamodel.js index bdf7d9a..b7b037f 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,2 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var i=t[r]={i:r,l:!1,exports:{}};return e[r].call(i.exports,i,i.exports,n),i.l=!0,i.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)n.d(r,i,function(t){return e[t]}.bind(null,i));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports=JSON.parse('{"name":"datamodel","description":"Relational algebra compliant in-memory tabular data store","homepage":"https://github.com/chartshq/datamodel","version":"2.2.1-alpha","license":"MIT","main":"dist/datamodel.js","keywords":["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],"author":"Muzejs.org (https://muzejs.org/)","repository":{"type":"git","url":"https://github.com/chartshq/datamodel.git"},"contributors":[{"name":"Akash 
Goswami","email":"akashgoswami90s@gmail.com"},{"name":"Subhash Haldar"},{"name":"Rousan Ali","email":"rousanali786@gmail.com","url":"https://rousan.io"},{"name":"Ujjal Kumar Dutta","email":"duttaujjalkumar@live.com"}],"dependencies":{"d3-dsv":"^1.0.8"},"devDependencies":{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0","chai":"3.5.0","cross-env":"^5.0.5","eslint":"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0","jsdoc":"3.5.5","json2yaml":"^1.1.0","karma":"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3","marked":"^0.5.0","mocha":"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0","webpack":"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},"scripts":{"test":"npm run lint && npm run ut","ut":"karma start karma.conf.js","utd":"karma start --single-run false --browsers Chrome karma.conf.js ","build":"npm run build:prod","build:dev":"webpack --mode development","build:prod":"webpack --mode production","start":"webpack-dev-server --config webpack.config.dev.js --mode development --open","lint":"eslint ./src","lint-errors":"eslint --quiet ./src","docs":"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}')},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",(function(){return o})),n.d(r,"DimensionSubtype",(function(){return u})),n.d(r,"MeasureSubtype",(function(){return c})),n.d(r,"FieldType",(function(){return f})),n.d(r,"FilteringMode",(function(){return l})),n.d(r,"GROUP_BY_FUNCTIONS",(function(){return s}));var 
i={};n.r(i),n.d(i,"Dimension",(function(){return Ce})),n.d(i,"Measure",(function(){return Me})),n.d(i,"FieldParser",(function(){return Ue})),n.d(i,"fieldRegistry",(function(){return it})),n.d(i,"columnMajor",(function(){return v}));var a={};n.r(a),n.d(a,"sum",(function(){return jn})),n.d(a,"avg",(function(){return An})),n.d(a,"min",(function(){return kn})),n.d(a,"max",(function(){return Dn})),n.d(a,"first",(function(){return Sn})),n.d(a,"last",(function(){return Tn})),n.d(a,"count",(function(){return Fn})),n.d(a,"sd",(function(){return Nn}));var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function p(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,i=void 0;if(!n)return t;var a=n.toLowerCase();for(i=0,r=e.length;ii.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var 
e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var 
b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var _={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return 
un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case 
f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return G(t,e,n,!1,L.RIGHTOUTER)}var Te=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Me),tt=function(){function e(e,t){for(var n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var 
t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o0&&void 0!==arguments[0]?arguments[0]:[];return t.forEach((function(t){return e.store.set(t.type,t)})),this.store}},{key:"register",value:function(e){return e instanceof ct?(this.store.set(e.type,e),this):null}},{key:"unregister",value:function(e){return this.store.delete(e.type),this}},{key:"get",value:function(e){return this.store.has(e)?this.store.get(e):null}}]),e}(),Mt=function(){var e=null;return e||(e=new It)}(),xt=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function Ut(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Lt(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Gt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Jt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Gt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return 
N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return 
t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Jt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Jt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,i=e.subtype,a=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(i))throw new Error("DataModel doesn't support dimension field subtype "+i+" used for "+a+" field");break;case f.MEASURE:if(-1===t.indexOf(i))throw new Error("DataModel doesn't support measure field subtype "+i+" used for "+a+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+a+" field")}}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return 
N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 
0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return 
this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Jt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Jt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new F(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new 
Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return c.addField(v),Jt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Tn=me.last,Fn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Jt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;ni.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 
0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var _={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function G(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:G;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var 
a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw 
f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return 
n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return J(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return J(t,e,n,!1,L.RIGHTOUTER)}var Fe=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Me),tt=function(){function e(e,t){for(var n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o0&&void 0!==arguments[0]?arguments[0]:[];return t.forEach((function(t){return e.store.set(t.type,t)})),this.store}},{key:"register",value:function(e){return e instanceof ct?(this.store.set(e.type,e),this):null}},{key:"unregister",value:function(e){return this.store.delete(e.type),this}},{key:"get",value:function(e){return this.store.has(e)?this.store.get(e):null}}]),e}(),Mt=function(){var e=null;return e||(e=new It)}(),xt=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 
0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function Ut(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Lt(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Jt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Gt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Jt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var 
t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Gt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Gt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case 
f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=e.type,n=e.subtype,r=e.name;if(t!==f.DIMENSION&&t!==f.MEASURE)throw new Error("DataModel doesn't support field type "+t+" used for "+r+" field");if(!it.has(n))throw new Error("DataModel doesn't support measure field subtype "+n+" used for "+r+" field")}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var 
s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var 
r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Gt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Gt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new T(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return c.addField(v),Gt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return 
n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Fn=me.last,Tn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return 
t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Gt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from 
'../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the 
subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import Field 
from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return 
numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of 
the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? 
new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from 
'../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends 
DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from './model/dataConverter';\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n *\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n 
converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n 
partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, 
options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === 
false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, 
union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * 
{ name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... 
}\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. 
It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n 
DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataMod
el/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/defaultConverters/autoCoverter.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/expor
t.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar
","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","len","store","fields","forEach","fieldIndex","Array","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","rawValue","formattedValue","getNumberFormattedVal","defineProperties","_value","configurable","writable","_formattedValue","_internalValue","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset",
"rowAdded","rowPosition","ii","tuple","userArg","partialField","formattedData","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","dataType","sortType","retFunc","getSortFn","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","applyStandardSort","sortingDetails","fieldName","sortMeta","fieldInSchema","sortingFn","slice","f","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","sortData","dataObj","filter","sDetial","groupSortingIdx","findIndex","standardSortingDetails","groupSortingDetails","detail","sortedGroupMap","row","nextMap","applyGroupSort","uids","pop","dataBuilder","colIdentifier","addUid","columnWise","retObj","reqSorting","tmpDataArr","colName","insertInd","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","filteredValues","min","max","sqrt","mean","num","variance","defaultReducerName","ReducerStore","defReducer","entries","reducer","__unregister","delete","Function","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","rowCount","hash","_","cachedStore","cloneProvider","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","internalValue","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","PartialField","_sanitize","Field","subtype","description","displayName","_params","_c
ontext","build","Dimension","_cachedDomain","calculateDataDomain","Measure","unit","numberFormat","FieldParser","CategoricalParser","isInvalid","getInvalidType","trim","Categorical","Set","domain","add","TemporalParser","_dtf","Temporal","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","dataFormat","parsedDatum","BinnedParser","matched","parseFloat","Binned","binsArr","bins","ContinuousParser","Continuous","NEGATIVE_INFINITY","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","fieldRegistry","createFields","dataColumn","headers","headersObj","header","BUILDER","createUnitField","DataConverter","_type","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","concat","formatBody","formatRows","csv","dsv","tsv","DSVArr","schemaFields","unitSchema","firstRowHeader","columnMajor","headerMap","h","schemaField","headIndex","DSVStr","fieldSeparator","d3Dsv","DSVStringConverter","FlatJSON","insertionIndex","schemaFieldsName","JSONConverter","DSVArrayConverter","Auto","converters","AutoDataConverter","DataConverterStore","_getDefaultConverters","converter","converterStore","prepareSelectionData","rawData","resp","updateFields","partialFieldspace","fieldStoreName","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistCurrentDerivation","model","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","persistDerivations","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastIn
sertedValue","li","selectRowDiffsetIterator","checker","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","getPartialFieldspace","formattedFieldsData","rawFieldsData","cloneWithAllFields","clone","calculateFieldsConfig","getKey","fn","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","getFieldsConfig","propModel","keyFn","getData","fieldsConfig","dLen","indices","fieldsSpace","v","valuesMap","present","every","select","saveChild","some","addDiffsetToClonedDm","selectConfig","cloneWithProject","projField","allFields","cloned","projectionSet","actualProjField","splitWithProject","projFieldSet","projFields","sanitizeUnitSchema","sanitizeAndValidateSchema","validateUnitSchema","updateData","relation","defaultConfig","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","_partialFieldspace","valueObjects","_cachedValueObjects","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","derivation","params","groupByString","getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","getRootGroupByModel","_parent","find","getRootDataModel","getPathToRootModel","path","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","values","mutableActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","propagateImmutableActions","immutableActions","filterImmu
tableAction","criteriaModel","addToPropNamespace","sourceNamespace","isMutableAction","getNormalizedProFields","fieldConfig","normalizedProjField","constructor","search","Relation","source","_fieldStoreName","_propagationNameSpace","_fieldspace","joinWith","unionWith","differenceWith","defConfig","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithSelect","setParent","_fieldConfig","fieldObj","def","removeChild","sibling","parent","_onPropagation","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","ids","fill","fieldsArr","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","cachedValueObjects","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","addField","addToNameSpace","payload","eventName","measureFieldName","binFieldName","measureField","binsCount","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","clonedDMs","splitWithSelect","uniqueFields","commonFields","normalizedProjFieldSets","fieldSet","first","last","count","sd","std","Operators","compose","operations","currentDM","firstChild","dispose","bin","project","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","FieldsUtility","enums"],"mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,YAAa,GAAIH,GACE,iBAAZC,QACdA,QAAmB,UAAID,IAEvBD,EAAgB,UAAIC,IARtB,CASGK,QAAQ,WACX,O,YCTE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUP,QAGnC,IAAIC,EAASI,EAAiBE,GAAY,CACzCC,EAAGD,EACHE,GAAG,EACHT,QAAS,IAUV,OANAU,EAAQH,GAAUI,KAAKV,EAAOD,QAASC,EAAQA,EAAOD,QAASM,GAG/DL,EAAOQ,GAAI,EAGJR,EAAOD,QA0Df,OArDAM,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASd,EAASe,EAAMC,GAC3CV,EAAoBW,EAAEjB,EAASe,IAClCG,OAAOC,eAAenB,EAASe,EAAM,CAAEK,YAAY,EAAMC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAAStB,GACX,oBAAXuB,QAA0BA,
OAAOC,aAC1CN,OAAOC,eAAenB,EAASuB,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAenB,EAAS,aAAc,CAAEyB,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAAShC,GAChC,IAAIe,EAASf,GAAUA,EAAO2B,WAC7B,WAAwB,OAAO3B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAK,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,G,+jEClFrD,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,G,k3BCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCCKC,EANU,CACrBC,YAAa,cACbC,SAAU,WACVC,OAAQ,UCCGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,E
AAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,EAAG,CAECM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,EAAG,CAECvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECtG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEClH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,EAAG,CAECC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,
SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,EAAG,CAECG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,EAAG,CAECjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI,UAAK8F,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAX,YAAWA,IACX,kBAASA,OAAT,YAASA,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,KAAM,CAACyD,EAAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SA
AU8C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIJ,SACAK,SACAC,SACAzE,SAcJ,OAZIqE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGvD,OAAOuD,EAAO,MACxBC,GAAO,GAGXN,EAAcE,GAEdF,EADOE,GAGOD,EAGbD,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCM,IACAzE,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkB+F,WAAa,SAAU9F,GAQrC,IAPA,IAAM+F,EAAchG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCmE,EAAgBzJ,OAAO0J,KAAK1B,GAC5B2B,EAAa,GACfrK,SACAsK,UAEItK,EAAImE,EAAOoG,QAAQL,EAAalK,EAAI,KAAO,GAC/CsK,EAAcnG,EAAOnE,EAAI,IACmB,IAAxCmK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZlE,MAAOtG,EACPyK,MAAOH,IAIf,OAAOD,GASXnG,EAAkBwG,SAAW,SAAU3G,EAAMI,GACzC,IAQIlE,EARE0K,EAAQ7G,EAAoBC,GAC5BsG,EAAanG,EAAkB+F,WAAW9F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC4E,EAAeC,OAAO1G,GACpB+F,EAAchG,EAAkBU,aAClC6F,SACAK,SACA9K,SAGJ,IAAKA,EAAI,EAAGC,EAAIoK,EAAWtE,OAAQ/F,EAAIC,EAAGD,IAEtC8K,EAAepC,EADf+B,EAAQJ,EAAWrK,GAAGyK,OACYhE,UAAUkE,GAC5CC,EAAeA,EAAajG,QAAQ,IAAIH,OAAO0F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX1G,EAAkBtC,UAAUmJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM1B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK8G,kBAAkBF,GAClCG,EAAajH,EAAkBW,wBAC/BuG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAlG,SACAvF,SACA0L,SACAC,SACA1L,SACA+H,EAAS,GAEb,IAAKuD,KAAehC,EAChB,GAAK,GAAG1H,eAAe1B,KAAKoJ,EAAegC,GAA3C,CAMA,IAJAD,EAAKvF,OAAS,EAEd0F,GADAD,EAAiBjC,EAAcgC,IACHK,OAAOJ,EAAezF,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIuL,EAAezF,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNqH,EAAQF,EAAexL,IACFO,OAGjB+K,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOnG,IAM1B,GAAI,OAFJoG,EAAcF,EAAWI,MAAMzH,KAAMkH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWtF,QAAU3B,KAAK0H,gBAAgBT,EAAWtF,QAErDiC,EAAO+D,QAAQV,EAAW,GAAI,EAAG,GAEjCrD,EAAO+D,QAAP,MAAA/D,EAAkBqD,GAGfrD,GAQX9D,EAAkBtC,UAAUsJ,kBAAoB,SAAUF,GACtD,IAYI/K,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCkE,EAAchG,EAAkBU,aAChCyF,EAAanG,EAAkB+F,WAAW9F,GAC1C6H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGArM,SAEJqM,EAAcxB,OAAO1G,GAErB,IAAMmI,EAAWjC,EAAWkC,KAAI,SAAAC,GAAA,OAAOA,EAAI/B,SACrCgC,EAAmBpC,EAAWtE,OACpC,
IAAK/F,EAAIyM,EAAmB,EAAGzM,GAAK,EAAGA,KACnCmM,EAAW9B,EAAWrK,GAAGsG,OAEV,IAAM+F,EAAYtG,OAAS,QAKdzB,IAAxB2H,IACAA,EAAsBI,EAAYtG,QAGtCqG,EAAaC,EAAYpE,UAAUkE,EAAW,EAAGF,GACjDI,EAAcA,EAAYpE,UAAU,EAAGkE,EAAW,GAC9C3H,OAAOC,OAAO2H,GACdC,EAAYpE,UAAUgE,EAAqBI,EAAYtG,QAE3DkG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKnM,EAAI,EAAGA,EAAIyM,EAAkBzM,IAC9BkM,EAAS7B,EAAWrK,GACpBqM,EAAcA,EAAY1H,QAAQuF,EAAcgC,EAAOzB,MAAO/B,EAAYwD,EAAOzB,OAAOlE,WAG5F,IAAMmG,EAAgB1B,EAAc2B,MAAM,IAAInI,OAAO6H,KAAiB,GAGtE,IAFAK,EAAcE,QAET5M,EAAI,EAAGC,EAAIqM,EAASvG,OAAQ/F,EAAIC,EAAGD,IACpCgM,EAASM,EAAStM,IAAM0M,EAAc1M,GAE1C,OAAOgM,GAQX9H,EAAkBtC,UAAUiL,cAAgB,SAAU7B,GAClD,IAAIjH,EAAO,KACX,GAAI+I,OAAOrH,SAASuF,GAChBjH,EAAO,IAAIC,KAAKgH,QACb,IAAK5G,KAAKD,QAAUH,KAAK+G,MAAMC,GAClCjH,EAAO,IAAIC,KAAKgH,OAEf,CACD,IAAM3G,EAAWD,KAAKC,SAAWD,KAAK2G,MAAMC,GACxC3G,EAAS0B,SACT3B,KAAKG,WAAL,kCAAsBP,KAAtB,c,sHAAA,CAA8BK,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUkK,gBAAkB,SAASiB,GACnD,OAAe,IAARA,GAAa3I,KAAKD,OAAOwI,MAAM,QAAQ5G,QASlD7B,EAAkBtC,UAAU8I,SAAW,SAAUvG,EAAQ6G,GACrD,IAAIzG,SAQJ,OANIyG,EACAzG,EAAaH,KAAKG,WAAaH,KAAKyI,cAAc7B,IACzCzG,EAAaH,KAAKG,cAC3BA,EAAaH,KAAKyI,cAAc7B,IAG7B9G,EAAkBwG,SAASnG,EAAYJ,ICruBnC,eAAC6I,GACZ,IAAIhN,EAAI,EACR,OAAO,WAAe,2BAAXiN,EAAW,qBAAXA,EAAW,gBAClBA,EAAOC,SAAQ,SAAC3H,EAAK4H,GACXH,EAAMG,aAAuBC,QAC/BJ,EAAMG,GAAcC,MAAMC,KAAK,CAAEtH,OAAQ/F,KAE7CgN,EAAMG,GAAY3C,KAAKjF,MAE3BvF,M,4MCdFsN,EAAe,SACfC,EAAgB7M,OAAOkB,UAAU+E,SACjC6G,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAelB,EAAKmB,GAIzB,IAHA,IAAI3N,EAAI2N,EAAU5H,OACd6H,GAAU,EAEP5N,GAAG,CACN,GAAIwM,IAAQmB,EAAU3N,GAElB,OADA4N,EAAS5N,EAGbA,GAAK,EAGT,OAAO4N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,cAAOA,MAASR,SAAgB,IAAOS,EAAP,cAAOA,MAAST,EACzC,WAGP,IAAOS,EAAP,cAAOA,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,cAAOA,MAASR,IAChBQ,EAAOC,aAAgBX,MAAQ,GAAK,IAnH5C,SAASa,EAAMH,EAAMC,EAAMC,EAAWE,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAO1D,KAAKsD,GACZK,EAAO3D,KAAKuD,KALZG,EAAS,CAACJ,GACVK,EAAS,CAACJ,IAOVA,aAAgBX,MAChB,IAAKgB,EAAO,EAAGA,EAAOL,EA
AKhI,OAAQqI,GAAQ,EAAG,CAC1C,IACIC,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,eAGA,IAAO6G,EAAP,cAAOA,MAAWhB,EACZU,QAAwB1J,IAAXgK,IACfR,EAAKM,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQE,aAAkBlB,MAAQ,GAAK,KAG3C,KADdoB,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,SAMrD,IAAKC,KAAQL,EAAM,CACf,IACIM,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,SAGJ,GAAe,OAAX6G,SAAmB,IAAOA,EAAP,cAAOA,MAAWhB,GAKrCiB,EAAMhB,EAAcpN,KAAKmO,MACbd,GACO,OAAXa,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAGxCI,IAAQd,GACE,OAAXY,GAAqBA,aAAkBjB,QACvCiB,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAI7CL,EAAKM,GAAQE,MAGhB,CACD,GAAIN,QAAwB1J,IAAXgK,EACb,SAEJR,EAAKM,GAAQE,GAIzB,OAAOR,EAiBPG,CAAMH,EAAMC,EAAMC,GACXF,GCnIJ,SAASW,EAASlJ,GACrB,OAAO6H,MAAMqB,QAAQlJ,GA2ClB,IAAMmJ,EAAc,wBAAY,IAAI1K,MAAO2K,UAAYvG,KAAKwG,MAAsB,IAAhBxG,KAAKyG,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKhJ,SAAWiJ,EAAKjJ,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAI+O,EAAKhJ,OAAQ/F,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAa1J,GACzB,OAAOA,EASJ,IAAM2J,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAKpJ,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4B6J,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CAtK,KAAK+K,KAAKK,GAAU,CAChBjP,KAAMiP,EACNvC,OAAQsC,EAERE,UAJgB,WAKZ,IAAIA,EAAYrL,KAAKsL,iBAQrB,OANKD,IACDA,EAAYrL,KAAKsL,iBAAmB,GACpCtL,KAAK6I,OAAOC,SAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,MAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBzL,KAAK0L,eAUzB,OARKD,IACDA,EAAgBzL,KAAK0L,eAAiB,GACtC1L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUC,UAClC8M,EAAcF,EAAMpP,QAAUoP,OAInCE,GAEXI,aA5BgB,WA6BZ,
IAAIC,EAAkB9L,KAAK+L,iBAU3B,OARK/L,KAAK+L,mBACND,EAAkB9L,KAAK+L,iBAAmB,GAC1C/L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUE,YAClCkN,EAAgBP,EAAMpP,QAAUoP,OAIrCO,IAGR9L,KAAK+K,KAAKK,K,yPCqCVY,E,WAxEX,WAAanP,EAAOoP,EAAUV,I,4FAAO,SACjC,IAAMW,EAAiBC,GAAsBZ,EAAO1O,GAEpDP,OAAO8P,iBAAiBpM,KAAM,CAC1BqM,OAAQ,CACJ7P,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,SAEJ2P,gBAAiB,CACbhQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOqP,GAEXO,eAAgB,CACZjQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOoP,KAIfjM,KAAKuL,MAAQA,E,6CAkCb,OAAO9E,OAAOzG,KAAKnD,S,gCAUnB,OAAOmD,KAAKnD,Q,4BAnCZ,OAAOmD,KAAKqM,S,qCAOZ,OAAOrM,KAAKwM,kB,oCAOZ,OAAOxM,KAAKyM,mB,KCxDb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWhL,OAAS,GACDgL,EAAWE,MAAM,KACzB/D,SAAQ,SAACgE,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIpR,EAAIoR,EAAOpR,GAAKqR,EAAKrR,GAAK,EAC/BgR,EAAShR,M,6PCVvBsR,E,WAqBF,WAAarQ,I,4FAAO,SAChBmD,KAAKqM,OAASxP,E,wDAdOsQ,GACrB,OAAKA,EAGE7Q,OAAO8Q,OAAOF,EAAkBG,qBAAsBF,GAFlDD,EAAkBG,yB,mCAsB7B,OAAOrN,KAAKqM,S,iCAUZ,OAAO5F,OAAOzG,KAAKqM,W,iCAGNlL,GACb,OAAQA,aAAe+L,KAAwBA,EAAkBI,mBAAmBnM,K,qCAGlEA,GAClB,OAAOA,aAAe+L,EAAoB/L,EAAM+L,EAAkBI,mBAAmBnM,O,KAO7F+L,EAAkBK,KAAO,IAAIL,EAAkB,QAC/CA,EAAkBM,GAAK,IAAIN,EAAkB,MAC7CA,EAAkBO,IAAM,IAAIP,EAAkB,OAO9CA,EAAkBG,qBAAuB,CACrCK,QAASR,EAAkBM,GAC3BG,IAAKT,EAAkBO,IACvBG,KAAMV,EAAkBK,KACxBrN,UAAWgN,EAAkBM,IAGlBN,Q,8YC5ETW,EAAkB,SAACC,EAASd,EAAOC,GAIrC,IAHA,IAAMc,EAAU,GACZC,EAAOhB,EAEJgB,EAAOf,GACVc,EAAQ3H,KAAK4H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ3H,KAAK4H,GAEND,GAGLE,EAAkB,SAACC,EAAcrR,GAOnC,IANA,IAAIsR,EAAU,EACVC,EAAWF,EAAavM,OAAS,EACjC0M,SACA7M,SAGG2M,GAAWC,GAAU,CAIxB,GAAIvR,IAFJ2E,EAAQ0M,EADRG,EAASF,EAAUnK,KAAKsK,OAAOF,EAAWD,GAAW,KAGlCnB,OAASnQ,EAAQ2E,EAAMyL,IACtC,OAAOzL,EACA3E,GAAS2E,EAAMyL,IACtBkB,EAAUE,EAAS,EACZxR,EAAQ2E,EAAMwL,QACrBoB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,M,wHCzBF,SAA
SC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAI1G,OAAOC,SAAQ,SAACyC,GAChBmE,EAAOtJ,KAAKmF,EAAMI,SAASxP,SAE/BqT,EAAI3G,OAAOC,SAAQ,SAACyC,IAC6B,IAAzCmE,EAAOvJ,QAAQoF,EAAMI,SAASxP,OAC9BsT,EAAOrJ,KAAKmF,EAAMI,SAASxP,SAG5BsT,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD,wDAAxBC,EAAwB,uDAAblB,EAAMC,MACtFrD,EAAS,GACTZ,EAAO,GACPmF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAchU,KAClCoU,EAAoBF,EAAclU,KAClCA,EAAUgU,EAAchU,KAAxB,IAAgCkU,EAAclU,KAC9CqU,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CAqFpB,OAlFAN,EAActH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,OAAiB6T,IACpDU,EAAUvU,KAAUgU,EAAchU,KAAlC,IAA0CuU,EAAUvU,MAExDwP,EAAOvF,KAAKsK,MAEhBL,EAAcxH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,MAC9B6T,IACDU,EAAUvU,KAAUkU,EAAclU,KAAlC,IAA0CuU,EAAUvU,KACpDwP,EAAOvF,KAAKsK,IAGhB/E,EAAOvF,KAAKsK,MAKpBhE,EAAmBmD,EAAIc,aAAa,SAAC/U,GACjC,IAAIgV,GAAW,EACXC,SACJnE,EAAmBoD,EAAIa,aAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAActH,OAAOC,SAAQ,SAACyC,GAC1BwF,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAKnP,IACnCoV,EAAQV,GAAmB/E,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAKnP,GAClCsQ,eAAgBX,EAAM2F,gBAAgBtV,OAG9CyU,EAAcxH,OAAOC,SAAQ,SAACyC,IAC+B,IAAnDiF,EAAiBrK,QAAQoF,EAAMI,SAASxP,OAAgB6T,GAC1De,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAK+F,IAEvCE,EAAQT,GAAmBhF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAK+F,GAClC5E,eAAgBX,EAAM2F,gBAAgBJ,OAI9C,IAIMK,EAAYC,GAAgBJ,EAAQV,IACpCe,EAAYD,GAAgBJ,EAAQT,IAC1C,GAAIL,EAAmBiB,EAAWE,GALb,kBAAMxB,EAAIyB,kBACV,kBAAMxB,EAAIwB,iBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBR,EAAMjI,SAAQ,SAAC0I,EAASC,GACpBF,EAAS5F,EAAO8F,GAAKtV,MAAQqV,KAE7BZ,GAAY7B,EAAMC,QAAUiB,EAC5BlF,EAAK8F,GAAeU,GAGpBxG,EAAK3E,KAAKmL,GACVX,GAAW,EACXC,EAAcjV,QAEf,IAAKqU,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMW,EAAW,GACb5I,EAAMwH,EAActH,OAAOlH,OAAS,EACxCoP,EAAMjI,SAAQ,SAAC0I,EAASC,GAEhBF,EAAS5F,EAAO8F,G
AAKtV,MADrBsV,GAAO9I,EACsB6I,EAGA,QAGrCZ,GAAW,EACXC,EAAcjV,EACdmP,EAAK3E,KAAKmL,UAKf,IAAI3T,GAAUmN,EAAMY,EAAQ,CAAExP,SCjHzC,SAASuV,EAAW1O,EAAGO,GACnB,IAAMoO,EAAKA,GAAG3O,EACR4O,EAAKA,GAAGrO,EACd,OAAIoO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB,uDAAXL,EAIrC,OAHII,EAAInQ,OAAS,GArBrB,SAASqQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKjO,KAAKsK,OAAO4D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACNzW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EAC3ByW,EAAOzW,GAAKwW,EAAQxW,GAKxB,IAHA,IAAIoH,EAAIiP,EACJ1O,EAAI4O,EAAM,EAELvW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EACvBoH,EAAImP,GACJC,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GACEA,EAAI2O,GACXE,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,GACE+O,EAAOM,EAAOrP,GAAIqP,EAAO9O,KAAO,GACvC6O,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,IAELoP,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GAqBbsG,CAAMiI,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAInQ,OAAS,EAAGoQ,GAE1BD,E,0gBChCX,SAASQ,EAAqBC,EAAUC,GACpC,IAAMC,EAAmD,SAAvChM,OAAO+L,GAAc9Q,cAA2B,OAAS,MAC3E,OA9CJ,SAAoBgR,EAAUC,GAC1B,IAAIC,SAEJ,OAAQF,GACR,KAAKlU,EAAeC,WACpB,KAAKL,EAAiBE,SAEdsU,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAAJ,OAAUP,EAAIO,GAEd,SAACP,EAAGO,GAAJ,OAAUA,EAAIP,GAE5B,MACJ,QAEQ4P,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,EAAI,GAAK,GAGd,SAACP,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,GAAK,EAAI,GAKhC,OAAOqP,EAYAC,CAAUN,EAAS3G,KAAM6G,GAUpC,SAASK,EAAW/H,EAAMhC,GACtB,IAAMgK,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAlI,EAAKjC,SAAQ,SAACoK,GACV,IAAMC,EAAWD,EAAMnK,GACnBgK,EAAQK,IAAID,GACZF,EAAYF,EAAQtW,IAAI0W,IAAW,GAAG/M,KAAK8M,IAE3CD,EAAY7M,KAAK,CAAC+M,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYtR,OAAS,OAI5CsR,EAYX,SAASK,EAAoBC,EAAcC,EAAcC,GACrD,IAAMpO,EAAM,CACRqO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,QAAO,SAACC,EAAK5F,EAAM6F,GAE5B,OADAD,EAAI5F,GAAQuF,EAAa,GAAGpL,KAAI,SAAA+K,GAAA,OAASA,EAAMO,EAAmBI,GAAK3R,UAChE0R,IACRvO,GAEIA,EAUX,SAASyO,EAAmB/I
,EAAMY,EAAQoI,GAMtC,IALA,IAAIC,SACAC,SACA1B,SACA3W,EAAImY,EAAepS,OAAS,EAEzB/F,GAAK,EAAGA,IACXoY,EAAYD,EAAenY,GAAG,GAC9BqY,EAAWF,EAAenY,GAAG,IAC7B2W,EAAW2B,GAAcvI,EAAQqI,MVrFf,mBU4FHC,EAEXpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAU0Q,EAASjR,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAC1DmI,EAAQ4J,GAAW,WAC1B,IAAMhB,EAAcH,EAAU/H,EAAMwH,EAASrQ,OACvCiS,EAAYF,EAASA,EAAStS,OAAS,GACvC6R,EAAeS,EAASG,MAAM,EAAGH,EAAStS,OAAS,GACnD8R,EAAqBD,EAAarL,KAAI,SAAAkM,GAAA,OAAKH,GAAcvI,EAAQ0I,MAEvEpB,EAAYnK,SAAQ,SAACyK,GACjBA,EAAanN,KAAKkN,EAAmBC,EAAcC,EAAcC,OAGrE5B,EAAUoB,GAAa,SAACjQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4Q,EAAUnY,EAAGqB,MAIxB0N,EAAKpJ,OAAS,EACdsR,EAAYnK,SAAQ,SAACoK,GACjBnI,EAAK3E,KAAL,MAAA2E,EAAA,EAAamI,EAAM,QAnBG,GAqBvB,WACH,IAAMnB,EAASO,EAAoBC,EAAU0B,GAE7CpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAUwO,EAAO/O,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAH5D,IAiBf,I,EAAMoS,GAAsB,SAAtBA,EAAuBC,EAAYxJ,EAAMY,EAAQoI,GACnD,GAA0B,IAAtBQ,EAAW5S,OAAgB,OAAOoJ,EAEtC,IAAMyJ,EAAYD,EAAW,GACvBpM,EAAM,IAAI6K,IAEhBjI,EAAK4I,QAAO,SAACC,EAAKa,GACd,IAAMC,EAAOD,EAAQD,EAAUtS,OAM/B,OALI0R,EAAIR,IAAIsB,GACRd,EAAInX,IAAIiY,GAAMtO,KAAKqO,GAEnBb,EAAIP,IAAIqB,EAAM,CAACD,IAEZb,IACRzL,GAdmE,2BAgBtE,YAAuBA,EAAvB,+CAA4B,wBAAlBhL,EAAkB,KAAbgE,EAAa,KAClBwT,EAAOL,EAAoBC,EAAWH,MAAM,GAAIjT,EAAKwK,EAAQoI,GACnE5L,EAAIkL,IAAIlW,EAAKwX,GACT3L,MAAMqB,QAAQsK,IACdb,EAAkBa,EAAMhJ,EAAQoI,IApB8B,6EAwBtE,OAAO5L,GA2CJ,SAASyM,GAAUC,EAASd,GAAgB,IACzCpI,EAAiBkJ,EAAjBlJ,OAAQZ,EAAS8J,EAAT9J,KAGd,GAA8B,KAD9BgJ,EAAiBA,EAAee,QAAO,SAAAC,GAAA,QAAab,GAAcvI,EAAQoJ,EAAQ,QAC/DpT,OAAnB,CAEA,IAAIqT,EAAkBjB,EAAekB,WAAU,SAAAF,GAAA,OAA0B,OAAfA,EAAQ,MAClEC,GAAuC,IAArBA,EAAyBA,EAAkBjB,EAAepS,OAE5E,IAAMuT,EAAyBnB,EAAeK,MAAM,EAAGY,GACjDG,EAAsBpB,EAAeK,MAAMY,GAEjDlB,EAAkB/I,EAAMY,EAAQuJ,GAChCnK,EA5CJ,SAAyBA,EAAMY,EAAQoI,EAAgBQ,GAQnD,GAA8B,KAP9BR,EAAiBA,EAAee,QAAO,SAACM,GACpC,OAAkB,OAAdA,EAAO,KACPb,EAAWnO,KAAKgP,EAAO,KAChB,OAIIzT,OAAgB,OAAOoJ,EAE1CwJ,EAAaA,EAAWpM,KAAI,SAAAlM,GAAA,OAAKiY,GAAcvI,EAAQ1P,MAEvD,IAAMoZ,EAAiBf,GAAoBC,EAAYxJ,EAAMY,EAAQoI,
GACrE,OAAOhJ,EAAK5C,KAAI,SAACmN,GAIb,IAHA,IAAI1Z,EAAI,EACJ2Z,EAAUF,GAENrM,MAAMqB,QAAQkL,IAClBA,EAAUA,EAAQ9Y,IAAI6Y,EAAIf,EAAW3Y,KAAKsG,QAG9C,OAAOqT,EAAQ/M,WAuBZgN,CAAezK,EAAMY,EAAQwJ,EAAqBD,EAAuB/M,KAAI,SAAAiN,GAAA,OAAUA,EAAO,OAErGP,EAAQY,KAAO1K,EAAK5C,KAAI,SAAAmN,GAAA,OAAOA,EAAII,SACnCb,EAAQ9J,KAAOA,GCjPZ,SAAS4K,GAAa1K,EAAY0B,EAAYiJ,EAAe7B,EAAgBlN,GAKhFA,EAAUvK,OAAO8Q,OAAO,GAJL,CACfyI,QAAQ,EACRC,YAAY,GAEwBjP,GAExC,IAAMkP,EAAS,CACXpK,OAAQ,GACRZ,KAAM,GACN0K,KAAM,IAEJI,EAAShP,EAAQgP,OACjBG,EAAajC,GAAkBA,EAAepS,OAAS,EAEvDsU,EAAa,GAiDnB,GA/CgBL,EAAc/I,MAAM,KAE5B/D,SAAQ,SAACoN,GACb,IAAK,IAAIta,EAAI,EAAGA,EAAIqP,EAAWtJ,OAAQ/F,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAW+Z,EAAS,CAClCD,EAAW7P,KAAK6E,EAAWrP,IAC3B,UAMZqa,EAAWnN,SAAQ,SAACyC,GAEhBwK,EAAOpK,OAAOvF,KAAKmF,EAAMI,aAGzBkK,GACAE,EAAOpK,OAAOvF,KAAK,CACfjK,KAAM,MACNyP,KAAM,eAIdc,EAAmBC,GAAY,SAAC/Q,GAC5Bma,EAAOhL,KAAK3E,KAAK,IACjB,IAAM+P,EAAYJ,EAAOhL,KAAKpJ,OAAS,EAEvCsU,EAAWnN,SAAQ,SAACyC,EAAOuF,GACvBiF,EAAOhL,KAAKoL,GAAWrF,EAFf,GAE6BvF,EAAM0F,aAAalG,KAAKnP,MAE7Dia,IACAE,EAAOhL,KAAKoL,GAAWF,EAAWtU,QAAU/F,GAGhDma,EAAON,KAAKrP,KAAKxK,GAIboa,GAAcD,EAAOhL,KAAKoL,GAAW/P,KAAKxK,MAI9Coa,GACApB,GAASmB,EAAQhC,GAGjBlN,EAAQiP,WAAY,CACpB,IAAMM,EAAUpN,mB,sHAAAA,CAASA,MAAM+M,EAAOpK,OAAOhK,UAASwG,KAAI,iBAAM,MAChE4N,EAAOhL,KAAKjC,SAAQ,SAACiI,GACjBA,EAAMjI,SAAQ,SAACiC,EAAMnP,GACjBwa,EAAQxa,GAAGwK,KAAK2E,SAGxBgL,EAAOhL,KAAOqL,EAGlB,OAAOL,EC1EJ,SAASM,GAAYxG,EAAKC,GAC7B,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAiBX,SAAS2E,EAAkBC,EAAIvL,EAAWwL,GACtCnK,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACPD,GAAW9L,EAAK3E,KAAK2K,GACzBuF,EAAUQ,IAAY,MASlC,OAjCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAA
CkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA2BtCwa,EAAkB7G,EAAK2G,GAAuB,GAC9CE,EAAkB9G,EAAK2G,GAAuB,GAEvC,IAAI5Y,GAAUmN,EAAMY,EAAQ,CAAExP,S,sPC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS2X,GAAkBlF,GACvB,OAAOA,EAAIgD,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAShD,SAAS+J,GAAKnF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMkO,EAAiBF,GAAkBlF,GAIzC,OAHiBoF,EAAevV,OACZuV,EAAevD,QAAO,SAACC,EAAKuD,GAAN,OAAevD,EAAMuD,IAAM,GAC/CjK,EAAkBK,KAG5C,OAAOL,EAAkBK,KAU7B,SAAS6J,GAAKtF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMqO,EAAWJ,GAAInF,GACfnJ,EAAMmJ,EAAInQ,QAAU,EAC1B,OAAQ+G,OAAO4O,MAAMD,IAAaA,aAAoBnK,EAC7CA,EAAkBK,KAAO8J,EAAW1O,EAEjD,OAAOuE,EAAkBK,KAgG7B,IAAMgK,YACDrY,GAAM+X,IADL,KAED9X,GAAMiY,IAFL,KAGDhY,IAzFL,SAAc0S,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAKyT,IAAL,MAAAzT,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA+EvB,KAIDlO,IAzEL,SAAcyS,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAK0T,IAAL,MAAA1T,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA8DvB,KAKDjO,IAzDL,SAAgBwS,GACZ,OAAOA,EAAI,MAmDT,KAMDvS,IA/CL,SAAeuS,GACX,OAAOA,EAAIA,EAAInQ,OAAS,MAwCtB,KAODnC,IArCL,SAAgBsS,GACZ,OAAIzH,EAAQyH,GACDA,EAAInQ,OAERuL,EAAkBK,QA0BvB,KAQD9N,IAbL,SAAcqS,GACV,OAAO9N,KAAK2T,KAbhB,SAAmB7F,GACf,IAAI8F,EAAOR,GAAItF,GACf,OAAOsF,GAAItF,EAAI3J,KAAI,SAAA0P,GAAA,gBAAQA,EAAMD,EAAS,OAWzBE,CAAShG,OAIxB,GAWAiG,GAAqB7Y,G,0PCzCnB0J,GAjGFoP,G,WACF,aAAe,Y,4FAAA,SACXhY,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK4I,MAAMyK,IAAI,aAAc4E,IAE7B3b,OAAO4b,QAAQX,IAAQzO,SAAQ,SAAC3L,GAC5B,EAAKyL,MAAMyK,IAAIlW,EAAI,GAAIA,EAAI,O,oDAc/B,IAAK,UAAOwE,OACR,OAAO3B,KAAK4I,MAAMnM,IAAI,cAG1B,IAAI0b,EAAUA,UAAVA,8BAEJ,GAAuB,mBAAZA,EACPnY,KAAK4I,MAAMyK,IAAI,aAAc8E,OAC1B,CAEH,GADAA,EAAU1R,OAAO0R,IAC6B,IAA1C7b,OAAO0J,KAAKuR,IAAQpR,QAAQgS,
GAG5B,MAAM,IAAI1H,MAAJ,WAAqB0H,EAArB,0BAFNnY,KAAK4I,MAAMyK,IAAI,aAAckE,GAAOY,IAK5C,OAAOnY,O,+BAmCD7D,EAAMgc,GAAS,WACrB,GAAuB,mBAAZA,EACP,MAAM,IAAI1H,MAAM,gCAMpB,OAHAtU,EAAOsK,OAAOtK,GACd6D,KAAK4I,MAAMyK,IAAIlX,EAAMgc,GAEd,WAAQ,EAAKC,aAAajc,M,mCAGvBA,GACN6D,KAAK4I,MAAMwK,IAAIjX,IACf6D,KAAK4I,MAAMyP,OAAOlc,K,8BAIjBA,GACL,OAAIA,aAAgBmc,SACTnc,EAEJ6D,KAAK4I,MAAMnM,IAAIN,O,KAgBfoc,IARO,QAHd3P,GAAQ,QAIJA,GAAQ,IAAIoP,IAETpP,I,+YC5Cf,SAAS4P,GAASC,EAAWtN,EAAUuN,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWtN,GAC7B,IAAMsE,EAAS,GAEToJ,EADaJ,EAAUrI,gBACCvE,eAY9B,OAVAvP,OAAO4b,QAAQW,GAAY/P,SAAQ,YAAW,IAAT3L,EAAS,WACtCgO,GAAYA,EAASxJ,QACU,IAA3BwJ,EAAShF,QAAQhJ,IACjBsS,EAAOrJ,KAAKjJ,GAGhBsS,EAAOrJ,KAAKjJ,MAIbsS,EAyCWqJ,CAAYL,EAAWtN,GACnC4N,EAhCV,SAAwBN,GAA0B,IAAfC,EAAe,uDAAJ,GACpC3C,EAAS,GAETiD,EADaP,EAAUrI,gBACD5E,aACtByM,EAAaM,GAAaU,iBAchC,OAZA3c,OAAO0J,KAAKgT,GAAUlQ,SAAQ,SAACoQ,GACU,iBAA1BR,EAASQ,KAChBR,EAASQ,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYb,GAAac,QAAQX,EAASQ,IAC5CE,EACArD,EAAOmD,GAAeE,GAEtBrD,EAAOmD,GAAejB,EACtBS,EAASQ,GAAenB,OAGzBhC,EAcYuD,CAAcb,EAAWC,GACtCzN,EAAawN,EAAUrI,gBACvBmJ,EAAgBtO,EAAWI,YAC3BmO,EAASvO,EAAW9O,KACpBsd,EAAe,GACfC,EAAa,GACb/N,EAAS,GACToH,EAAU,GACVhI,EAAO,GACT4O,SAGJrd,OAAO4b,QAAQqB,GAAezQ,SAAQ,YAAkB,cAAhB3L,EAAgB,KAAXN,EAAW,KACpD,IAAgC,IAA5B+b,EAAUzS,QAAQhJ,IAAe4b,EAAW5b,GAG5C,OAFAwO,EAAOvF,KAAKqD,EAAQ,GAAI5M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKlN,EAAUC,QACX+a,EAAWtT,KAAKjJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAarT,KAAKjJ,OAK9B,IAAIyc,EAAW,EACflN,EAAmB+L,EAAU9H,aAAa,SAAC/U,GACvC,IAAIie,EAAO,GACXJ,EAAa3Q,SAAQ,SAACgR,GAClBD,EAAUA,EAAV,IAAkBN,EAAcO,GAAG7I,aAAalG,KAAKnP,WAEnCsE,IAAlB6S,EAAQ8G,IACR9G,EAAQ8G,GAAQD,EAChB7O,EAAK3E,KAAK,IACVqT,EAAa3Q,SAAQ,SAACgR,GAClB/O,EAAK6O,GAAUE,GAAKP,EAAcO,GAAG7I,aAAalG,KAAKnP,MAE3D8d,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAK6O,GAAUE,GAAK,CAACP,EAAcO,GAAG7I,aAAalG,KAAKnP,OAE5Dge,GAAY,GAEZF,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAKgI,EAAQ8G,IAAOC,GAAG1T,KAAKmT,EAAcO,GAAG7I,aAAalG,KAAKnP,UAM3E,IAAIme,EAAc,GACdC,EAAgB,kBAAMvB,EAAUnH,gBAcpC,OAbAvG,EAAKj
C,SAAQ,SAACwM,GACV,IAAMvE,EAAQuE,EACdoE,EAAW5Q,SAAQ,SAACgR,GAChB/I,EAAM+I,GAAKf,EAAWe,GAAGxE,EAAIwE,GAAIE,EAAeD,SAGpDpB,GACAA,EAAkBsB,wBAClBN,EAAehB,GAGfgB,EAAe,IAAI/b,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMqd,IAEhDG,EC9HJ,SAASO,GAAmBrK,EAAKC,GACpC,IAIMqK,EAAkB7K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACe,EAAWE,GACf,IAAI+I,GAAc,EASlB,OARAD,EAAgBrR,SAAQ,SAACkL,GAGjBoG,IAFAjJ,EAAU6C,GAAWqG,gBACrBhJ,EAAU2C,GAAWqG,gBAAiBD,MAMvCA,GCjBR,SAASE,GAAOzK,EAAKC,GACxB,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAgBX,SAAS2E,EAAmBC,EAAIvL,GAC5BqB,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACX/L,EAAK3E,KAAK2K,GACVuF,EAAUQ,IAAY,MASlC,OAhCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA0BtCwa,EAAkB9G,EAAK2G,GACvBG,EAAkB7G,EAAK2G,GAEhB,IAAI7Y,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASoe,GAAeC,EAAYC,EAAY1K,GACnD,OAAOH,EAAa4K,EAAYC,EAAY1K,GAAU,EAAOhB,EAAME,WAGhE,SAASyL,GAAgBF,EAAYC,EAAY1K,GACpD,OAAOH,EAAa6K,EAAYD,EAAYzK,GAAU,EAAOhB,EAAMG,Y,8PCFlDyL,G,WAUjB,WAAaxe,EAAM4O,EAAMY,EAAQvJ,I,4FAAQ,SACrCpC,KAAK7D,KAAOA,EACZ6D,KAAK2L,OAASA,EACd3L,KAAKoC,OAASA,EACdpC,KAAK+K,KAAO/K,KAAK4a,UAAU7P,G,6CAUpBA,GAAM,WACb,OAAOA,EAAK5C,KAAI,SAAA+K,GAAA,OAAS,EAAK9Q,OAAOuE,MAAMuM,EAAO,CAAEnT,OAAQ,EAAK4L,OAAO5L,gB,+PCX3D8a,G,WAQjB,WAAa5J,EAActE,I,4FAAY,SACnC3M,KAAKiR,aAAeA,EACpBjR,KAAK2M,WAAaA,E,4CAclB,MAAM,IAAI8D,MAAM,yB,+BAUhB,OAAOzQ,KAAKiR,aAAatF,S,6BAUzB,OAAO3L,KAAKiR,aAAa9U,O,6BAUzB,OAAO6D,KAAKiR,aAAatF,OAAOC,O,gCAUhC,OAAO5L,KAAKiR,aAAatF,OAAOmP,U,oCAUhC,OAAO9a,KAAKiR,aAAatF,OAAOoP,c,oCAUhC,OAAO/a,KAAKiR,aAAatF,OAAOqP,aAAehb,KAAKiR,aAAatF,OAAOxP,O,6BASpE,WACE4O
,EAAO,GAIb,OAHA2B,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjCmP,EAAK3E,KAAK,EAAK6K,aAAalG,KAAKnP,OAE9BmP,I,sCAUP,MAAM,IAAI0F,MAAM,0B,gCA9FhB,MAAM,IAAIA,MAAM,yB,8BAyIhB,MAvCgB,CACZwK,QAAS,GACTC,SAAUlb,KACVgU,UAHY,SAGF7X,GAEN,OADA6D,KAAKib,QAAQ9e,KAAOA,EACb6D,MAEX2L,OAPY,SAOLA,GAEH,OADA3L,KAAKib,QAAQtP,OAASA,EACf3L,MAEX+K,KAXY,SAWPA,GAED,OADA/K,KAAKib,QAAQlQ,KAAOA,EACb/K,MAEXiR,aAfY,SAeCA,GAET,OADAjR,KAAKib,QAAQhK,aAAeA,EACrBjR,MAEX2M,WAnBY,SAmBDA,GAEP,OADA3M,KAAKib,QAAQtO,WAAaA,EACnB3M,MAEXmb,MAvBY,WAwBR,IAAIlK,EAAe,KACnB,GAAIjR,KAAKib,QAAQhK,wBAAwB0J,GACrC1J,EAAejR,KAAKib,QAAQhK,iBACzB,KAAIjR,KAAKib,QAAQtP,SAAU3L,KAAKib,QAAQlQ,KAO3C,MAAM,IAAI0F,MAAM,4BANhBQ,EAAe,IAAI0J,GAAa3a,KAAKib,QAAQ9e,KACzB6D,KAAKib,QAAQlQ,KACb/K,KAAKib,QAAQtP,OACb3L,KAAKkb,SAAS9Y,UAKtC,OAAO,IAAIpC,KAAKkb,SAASjK,EAAcjR,KAAKib,QAAQtO,kB,+PCjK/CyO,G,stBAYb,OAHKpb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,4CAUZ,MAAM,IAAI5K,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAjCmB8P,I,0PCElBU,G,stBAYb,OAHKvb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,6BAUZ,OAAOrb,KAAKiR,aAAatF,OAAO6P,O,iCAUhC,OAAOxb,KAAKiR,aAAatF,OAAOwN,UAAYpB,K,qCAShC,IACJ0D,EAAiBzb,KAAKiR,aAAatF,OAAnC8P,aACR,OAAOA,aAAwBnD,SAAWmD,EAAe5Q,I,4CAUzD,MAAM,IAAI4F,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAhEiB8P,I,0PCLhBa,G,yKAQb,MAAM,IAAIjL,MAAM,2B,+PCJHkL,G,mtBAQVxa,GAQH,OALK+L,EAAkB0O,UAAUza,GAGpB+L,EAAkB2O,eAAe1a,GAFjCsF,OAAOtF,GAAK2a,W,GAZcJ,I,0PCC1BK,G,utBASb,OAAO3d,EAAiBC,c,4CAUL,WACbwb,EAAO,IAAImC,IACXC,EAAS,GAUf,OAPAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAGb+I,K,gCAIP,OAAO,IAAIN,O,GAnCsBP,I,0PCApBe,G,mtBAoBVhb,E,GAAiB,IAAVpB,EAAU,EAAVA,OACN6D,SAKJ,GAHK5D,KAAKoc,OACNpc,KAAKoc,KAAO,IAAItc,EAAkBC,IAEjCmN,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIhB,EAAaH,KAAKoc,KAAK3T,cAActH,GACzCyC,EAASzD,EAAaA,EAAWoK,UAAY2C,EAAkBM,GAInE,OAAO5J,M,GAhC6B8X,I,0PCEvBW,G,YAQjB,WAAapL,EAActE,I,4FAAY,e,iKAAA,wDAC7BsE,EAActE,IADe,OAGnC,EAAK2P,eAAiB,KAHa,E,w
XAahB,WACbzC,EAAO,IAAImC,IACXC,EAAS,GAYf,OARAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAIb+I,I,qDAWP,GAAIjc,KAAKsc,eACL,OAAOtc,KAAKsc,eAUhB,IAPA,IAAMC,EAAavc,KAAK+K,OAAO+J,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAAoB8E,MAAK,SAAChP,EAAGO,GAAJ,OAAUP,EAAIO,KACjGiZ,EAAQD,EAAW5a,OACrB8a,EAAU/T,OAAOgU,kBACjBC,SACAC,SACAC,EAAiB,EAEZjhB,EAAI,EAAGA,EAAI4gB,EAAO5gB,IACvB+gB,EAAYJ,EAAW3gB,EAAI,IAC3BghB,EAAYL,EAAW3gB,MAEL+gB,IAIlBF,EAAUzY,KAAKyT,IAAIgF,EAASG,EAAYL,EAAW3gB,EAAI,IACvDihB,KAQJ,OALKA,IACDJ,EAAU,MAEdzc,KAAKsc,eAAiBG,EAEfzc,KAAKsc,iB,+BAUZ,OAAOtc,KAAKiR,aAAatF,OAAO5L,S,sCAUnB,WACPgL,EAAO,GACP+R,EAAa9c,KAAKD,SAaxB,OAXA2M,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAErC,GAAIsR,EAAkB0O,UAAU1I,KAAY4J,GAAcpU,OAAOrH,SAAS6R,GAAS,CAE/E,IAAM6J,EAAc7P,EAAkB2O,eAAe3I,IAAUA,EAC/DnI,EAAK3E,KAAK2W,QAEVhS,EAAK3E,KAAKtG,EAAkBwG,SAAS4M,EAAO4J,OAG7C/R,K,gCAIP,OAAO,IAAIoR,O,GAjHmBf,I,0PCHjB4B,G,mtBAQV7b,GAEHA,EAAMsF,OAAOtF,GACb,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAK7ByC,EAASsJ,EAAkB2O,eAAe1a,OALP,CACnC,IAAI8b,EAAU9b,EAAIoH,MALR,2DAMV3E,EAASqZ,EAAavU,OAAOwU,WAAWD,EAAQ,IAAvC,IAA8CvU,OAAOwU,WAAWD,EAAQ,IAC9D/P,EAAkBM,GAIzC,OAAO5J,M,GApB2B8X,I,0PCArByB,G,muBASb,IAAMC,EAAUpd,KAAKiR,aAAatF,OAAO0R,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQzb,OAAS,M,6BAU7C,OAAO3B,KAAKiR,aAAatF,OAAO0R,Q,gCAIhC,OAAO,IAAIL,O,GAxBiB5B,I,0PCAfkC,G,mtBAQVnc,GACH,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIC,EAAY8b,WAAW/b,EAAK,IAChCyC,EAAS8E,OAAO4O,MAAMlW,GAAa8L,EAAkBM,GAAKpM,EAI9D,OAAOwC,M,GAjB+B8X,I,0PCGzB6B,G,utBASb,OAAO/e,EAAeC,a,4CAUH,WACfgZ,EAAM/O,OAAOgU,kBACbhF,EAAMhP,OAAO8U,kBAiBjB,OAdA9Q,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GACjCsX,aAAiBhG,IAIjBgG,EAAQuE,IACRA,EAAMvE,GAENA,EAAQwE,IACRA,EAAMxE,OAIP,CAACuE,EAAKC,M,gCAIb,OAAO,IAAI4F,O,GA1CqB/B,I,0PCNlCkC,G,WACF,c,4FAAc,SACVzd,KAAK0d,WAAa,IAAI1K,I,qDAGR8H,EAAS6C,GAEvB,OADA3d,KAAK0d,WAAWrK,IAAIyH,EAAS6
C,GACtB3d,O,0BAGP4L,GACA,OAAO5L,KAAK0d,WAAWtK,IAAIxH,K,0BAG3BA,GACA,OAAO5L,KAAK0d,WAAWjhB,IAAImP,O,KAI7BgS,GAAwB,SAAChV,GAC3BA,EACiBiV,kBAAkBzf,EAAiBC,YAAa0d,IAChD8B,kBAAkBzf,EAAiBE,SAAU+d,IAC7CwB,kBAAkBzf,EAAiBG,OAAQ4e,IAC3CU,kBAAkBrf,EAAeC,WAAY8e,KAanDO,GAVQ,WACnB,IAAIlV,EAAQ,KAMZ,OAAOA,IAJHA,EAAQ,IAAI6U,GACZG,GAAsBhV,GACfA,GALQ,GCgChB,SAASmV,GAAaC,EAAYrS,EAAQsS,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQtc,SACrBsc,EAAUtS,EAAOxD,KAAI,SAAA6B,GAAA,OAAQA,EAAK7N,SAGtC8hB,EAAQnV,SAAQ,SAACqV,EAAQviB,GACrBsiB,EAAWC,GAAUviB,KAGlB+P,EAAOxD,KAAI,SAAA6B,GAAA,OAnEtB,SAAyBe,EAAMY,GAG3B,OAFAZ,EAAOA,GAAQ,GAEX+S,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALV,MAK0B5B,EAAKpJ,OAAS,IAC9BwZ,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WANV,MAM0B5B,EAAKpJ,OAAS,IAC9BwZ,QAgDSkD,CAAgBL,EAAWE,EAAWlU,EAAK7N,OAAQ6N,MC3ElE,QACX8S,WAAY/e,EAAWI,M,0PCANmgB,G,WACjB,WAAY1S,I,4FAAM,SACd5L,KAAKue,MAAQ3S,E,6CAQb,MAAM,IAAI6E,MAAM,qC,2BAJhB,OAAOzQ,KAAKue,U,KCThBC,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBC,GACvB,OAAO,IAAIxG,SAAS,IAAK,WAAawG,EAAQ3W,KAAI,SAAShM,EAAMP,GAC/D,OAAOmjB,KAAKC,UAAU7iB,GAAQ,OAASP,EAAI,OAC1CqH,KAAK,KAAO,KAWjB,SAASgc,GAAaC,GACpB,IAAIC,EAAY7iB,OAAOY,OAAO,MAC1B4hB,EAAU,GAUd,OARAI,EAAKpW,SAAQ,SAASwM,GACpB,IAAK,IAAI8J,KAAU9J,EACX8J,KAAUD,GACdL,EAAQ1Y,KAAK+Y,EAAUC,GAAUA,MAKhCN,EAGT,SAASjf,GAAIhD,EAAOwiB,GAClB,IAAI1hB,EAAId,EAAQ,GAAI8E,EAAShE,EAAEgE,OAC/B,OAAOA,EAAS0d,EAAQ,IAAIrW,MAAMqW,EAAQ1d,EAAS,GAAGsB,KAAK,GAAKtF,EAAIA,EAStE,SAAS2hB,GAAW3f,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAK4f,cACbC,EAAU7f,EAAK8f,gBACfC,EAAU/f,EAAKggB,gBACfC,EAAejgB,EAAKkgB,qBACxB,OAAOvI,MAAM3X,GAAQ,iBAXHwE,EAYDxE,EAAKmgB,kBAXR,EAAI,IAAMjgB,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAKogB,cAAgB,EAAG,GAAK,IAAMlgB,GAAIF,EAAKqgB,aAAc,IAC1GJ,EAAe,IAAM/f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,E
AAS,GAAK,IAAM7f,GAAI+f,EAAc,GAAK,IACnHF,EAAU,IAAM7f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAChFF,GAAWhd,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IACjE,IAGO,gBAASS,GACtB,IAAIC,EAAW,IAAI9f,OAAO,KAAQ6f,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAU/f,EAAM+T,GACvB,IAIIvX,EAJAoiB,EAAO,GACPoB,EAAIhgB,EAAKqB,OACT4e,EAAI,EACJljB,EAAI,EAEJmjB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAASpa,IACP,GAAIma,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAI5iB,EAAUK,EAAPykB,EAAIH,EACX,GAAIjgB,EAAK8f,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAKhgB,EAAK8f,WAAWG,KAAO7B,IAASpe,EAAK8f,aAAaG,KAAO7B,KAI3E,OAHK9iB,EAAI2kB,IAAMD,EAAGE,GAAM,GACdvkB,EAAIqE,EAAK8f,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9CxkB,IAAM2iB,KAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,GACpEjgB,EAAK8T,MAAMsM,EAAI,EAAG9kB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOggB,EAAID,GAAG,CACZ,IAAKrkB,EAAIqE,EAAK8f,WAAWxkB,EAAI2kB,QAAU5B,GAAS8B,GAAM,OACjD,GAAIxkB,IAAM2iB,GAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,OACtE,GAAItkB,IAAMkkB,EAAW,SAC1B,OAAO7f,EAAK8T,MAAMsM,EAAG9kB,GAIvB,OAAO4kB,GAAM,EAAMlgB,EAAK8T,MAAMsM,EAAGJ,GAGnC,IA7BIhgB,EAAK8f,WAAWE,EAAI,KAAO3B,MAAW2B,EACtChgB,EAAK8f,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjCxjB,EAAIuJ,OAAaoY,IAAK,CAE5B,IADA,IAAInJ,EAAM,GACHxY,IAAM0hB,IAAO1hB,IAAM2hB,IAAKnJ,EAAIlP,KAAKtJ,GAAIA,EAAIuJ,IAC5CgO,GAA4B,OAAtBiB,EAAMjB,EAAEiB,EAAKjY,OACvB6hB,EAAK9Y,KAAKkP,GAGZ,OAAO4J,EAGT,SAASyB,EAAczB,EAAMJ,GAC3B,OAAOI,EAAK/W,KAAI,SAASmN,GACvB,OAAOwJ,EAAQ3W,KAAI,SAASiX,GAC1B,OAAOwB,EAAYtL,EAAI8J,OACtBnc,KAAKgd,MAkBZ,SAASY,EAAUvL,GACjB,OAAOA,EAAInN,IAAIyY,GAAa3d,KAAKgd,GAGnC,SAASW,EAAY/jB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiB+C,KAAO0f,GAAWziB,GACnCqjB,EAASY,KAAKjkB,GAAS,IAAM,IAAOA,EAAM0D,QAAQ,KAAM,MAAU,IAClE1D,EAGR,MAAO,CACL8J,MA5FF,SAAerG,EAAM+T,GACnB,IAAI0M,EAASjC,EAASI,EAAOmB,EAAU/f,GAAM,SAASgV,EAAK1Z,GACzD,GAAImlB,EAAS,OAAOA,EAAQzL,EAAK1Z,EAAI,GACrCkjB,EAAUxJ,EAAKyL,EAAU1M,EAtD/B,SAAyByK,EAASzK,GAChC,IAAI/W,EAASuhB,GAAgBC,GAC7B,OAAO,SAASxJ,EAAK1Z,GACnB,OAAOyY,EAAE/W,EAAOgY,GAAM1Z,EAAGkjB,IAmDMkC,CAA
gB1L,EAAKjB,GAAKwK,GAAgBvJ,MAGzE,OADA4J,EAAKJ,QAAUA,GAAW,GACnBI,GAuFPmB,UAAWA,EACXtgB,OA5BF,SAAgBmf,EAAMJ,GAEpB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrC,CAACJ,EAAQ3W,IAAIyY,GAAa3d,KAAKgd,IAAYgB,OAAON,EAAczB,EAAMJ,IAAU7b,KAAK,OA2B5Fie,WAxBF,SAAoBhC,EAAMJ,GAExB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrCyB,EAAczB,EAAMJ,GAAS7b,KAAK,OAuBzCke,WApBF,SAAoBjC,GAClB,OAAOA,EAAK/W,IAAI0Y,GAAW5d,KAAK,SC1IhCme,GAAMC,GAAI,KCAVC,IDEkBF,GAAIza,MACAya,GAAIf,UACPe,GAAIrhB,OACAqhB,GAAIF,WACJE,GAAID,WCNrBE,GAAI,OAEQC,GAAI3a,MACA2a,GAAIjB,UACPiB,GAAIvhB,OACAuhB,GAAIJ,WACJI,GAAIH,WC8ChBI,OAnCf,SAAgBzP,EAAKnG,EAAQ9E,GACzB,IAAKmC,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAEpB,IAGM+Q,EAAe7V,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QACzD0K,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,GAGuB7a,GAE3C,IAAMiY,EAAU,GACV1Y,EAAOub,EAAY7C,GAErBb,EAAUuD,EACV3a,EAAQ6a,iBAGRzD,EAAUnM,EAAItK,OAAO,EAAG,GAAG,IAG/B,IAAMoa,EAAY3D,EAAQtK,QAAO,SAACC,EAAKiO,EAAGjmB,GAAT,OAC7BU,OAAO8Q,OAAOwG,G,EAAYhY,G,EAAJimB,K,EAAtB,I,sGACD,IAUH,OARA/P,EAAIhJ,SAAQ,SAACD,GACT,IAAM0C,EAAQ,GAKd,OAJAiW,EAAa1Y,SAAQ,SAACgZ,GAClB,IAAMC,EAAYH,EAAUE,GAC5BvW,EAAMnF,KAAKyC,EAAOkZ,OAEf3b,eAAQmF,MAEZ,CAACiW,EAAc1C,IChBXkD,OAXf,SAAiB7X,EAAKwB,EAAQ9E,GAK1BA,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,EAChBO,eAAgB,KAEuBpb,GAE3C,IAAMwa,EAAMa,GAAMrb,EAAQob,gBAC1B,OAAOV,GAAOF,EAAIhB,UAAUlW,GAAMwB,EAAQ9E,I,0PC5BzBsb,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJpkB,EAAWE,U,0WAGb8M,EAAMY,EAAQ9E,GAClB,OAAOmb,GAAOjX,EAAMY,EAAQ9E,O,GANYyX,ICqDjC8D,OA7Bf,SAAmBtQ,EAAKnG,GACpB,IAAK3C,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAGpB,IAAM0N,EAAS,GACXviB,EAAI,EACJymB,SACEvD,EAAU,GACV1Y,EAAOub,EAAY7C,GACnBwD,EAAmB3W,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QAgB7D,OAdA2V,EAAIhJ,SAAQ,SAACkB,GACT,IAAMnB,EAAS,GACfyZ,EAAiBxZ,SAAQ,SAAC2Y,GAClBA,KAActD,EACdkE,EAAiBlE,EAAOsD,IAExBtD,EAAOsD,GAAc7lB,IACrBymB,EAAiBzmB,EAAI,GAEzBiN,EAAOwZ,GAAkBrY,EAAKyX,MAElCrb,eAAQyC,MAGL,CAACvM,OAAO0J,KAAKmY,GAASW,I,0PClDZyD,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJxkB,EAAWC,Y,0WAGb+M,EAAMY,EAAQ9E,GAClB,OAAOub,GAASrX,EAAMY,EAAQ9E,O,GANKyX,I,
0PCAtBkE,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJzkB,EAAWG,U,0WAGb6M,EAAMY,EAAQ9E,GAClB,OAAO0a,GAAOxW,EAAMY,EAAQ9E,O,GANWyX,ICmBhCmE,OAXf,SAAe1X,EAAMY,EAAQ9E,GACzB,IAAM6b,EAAa,CAAEN,YAAUJ,UAAQT,WACjCzE,EAAahS,EAAiBC,GAEpC,IAAK+R,EACD,MAAM,IAAIrM,MAAM,mCAGpB,OAAOiS,EAAW5F,GAAY/R,EAAMY,EAAQ9E,I,0PChB3B8b,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJ5kB,EAAWI,O,0WAGb4M,EAAMY,EAAQ9E,GAClB,OAAO1I,GAAK4M,EAAMY,EAAQ9E,O,GANayX,I,6PCEzCsE,G,WACF,c,4FAAc,SACV5iB,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK0iB,WAAW1iB,KAAK6iB,yB,2DAIrB,MAAO,CACH,IAAIV,GACJ,IAAIK,GACJ,IAAID,GACJ,IAAII,M,mCASgB,WAAjBD,EAAiB,uDAAJ,GAEpB,OADAA,EAAW5Z,SAAQ,SAAAga,GAAA,OAAa,EAAKla,MAAMyK,IAAIyP,EAAUlX,KAAMkX,MACxD9iB,KAAK4I,Q,+BAQPka,GACL,OAAIA,aAAqBxE,IACrBte,KAAK4I,MAAMyK,IAAIyP,EAAUlX,KAAMkX,GACxB9iB,MAEJ,O,iCASA8iB,GAEP,OADA9iB,KAAK4I,MAAMyP,OAAOyK,EAAUlX,MACrB5L,O,0BAGP7D,GACA,OAAI6D,KAAK4I,MAAMwK,IAAIjX,GACR6D,KAAK4I,MAAMnM,IAAIN,GAEnB,S,KAeA4mB,GAVS,WACpB,IAAIna,EAAQ,KAMZ,OAAOA,IAHHA,EAAQ,IAAIga,IAJI,G,ioBChDxB,SAASI,GAAsBna,EAAQqI,EAAe+R,EAASrnB,GAC3D,IAAMsnB,EAAO,GADiD,uBAG9D,YAA2Bra,EAAOqP,UAAlC,+CAA6C,yBAAjC/a,EAAiC,KAA5BoO,EAA4B,KACzC2X,EAAK3X,EAAMpP,QAAU,IAAI6P,EAAMkF,EAAc/T,GAAKvB,GAAIqnB,EAAQ9lB,GAAKvB,GAAI2P,IAJb,6EAM9D,OAAO2X,EAGJ,SAAS9R,GAAiBvI,GAC7B,IAAMqa,EAAO,GAEb,IAAK,IAAM/lB,KAAO0L,EACdqa,EAAK/lB,GAAO,IAAI6O,EAAMnD,EAAO1L,GAAK+O,eAAgBrD,EAAO1L,GAAK8O,SAAU9O,GAE5E,OAAO+lB,EAGJ,IAAMC,GAAe,SAAC,EAA6BC,EAAmBC,GAAmB,cAAlE1W,EAAkE,KAAtDiJ,EAAsD,KACxF0N,EAAS1N,EAAcjU,OAASiU,EAAc/I,MAAM,KAAO,GAC3D0W,EAAkBH,EAAkB/X,YACpCmY,EAAYF,EAAOnb,KAAI,SAAAsb,GAAA,OfGxB,SAAoCxS,EAActE,GAAY,IACzDhB,EAAWsF,EAAXtF,OAER,OAAImS,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACAnN,aAAaA,GACbtE,WAAWA,GACXwO,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACAnN,aAAaA,GACbtE,WAAWA,GACXwO,QelBkBuI,CAA2BH,EAAgBE,GAAMxS,aAActE,MAClG,OAAO1B,EAAWC,gBAAgBsY,EAAWH,IAGpCM,GAA2B,SAACC,EAAOC,GAAuC,IACzC,EADa1W,EAA4B,uDAAnB,GAAI2W,EAAe,aAC/ED,IAActV,EAAeI,SAC7BiV,EAAMG,YAAYpiB,OAAS,GAC3B,EAAAiiB,EAAMG,aAAY3
d,KAAlB,WAA0B0d,KAE1BF,EAAMG,YAAY3d,KAAK,CACnB4d,GAAIH,EACJI,KAAM9W,EACN+W,SAAUJ,KAITK,GAA4B,SAACC,EAAUC,GAAU,OAC1D,EAAAA,EAAMC,qBAAoBle,KAA1B,WAAkCge,EAASE,qBAA3C,UAAmEF,EAASL,gBAGnEQ,GAAqB,SAACH,EAAUR,EAAOC,GAAuC,IAA5B1W,EAA4B,uDAAnB,GAAI2W,EAAe,aACvFH,GAAyBC,EAAOC,EAAW1W,EAAQ2W,GACnDK,GAA0BC,EAAUR,IAGlCY,aACD3lB,EAAcC,OAAS,CACpB2lB,UAAW,CAAC,cACZC,SAAU,EAAC,GAAM,KAHnB,MAKD7lB,EAAcE,QAAU,CACrB0lB,UAAW,CAAC,oBACZC,SAAU,EAAC,GAAO,KAPpB,MASD7lB,EAAcG,IAAM,CACjBylB,UAAW,CAAC,aAAc,oBAC1BC,SAAU,EAAC,GAAM,KAXnB,IAeAC,GAAqB,SAAChY,EAAY/Q,EAAGgpB,GACvC,IAA2B,IAAvBA,GAA4BhpB,IAAOgpB,EAAoB,EAAI,CAC3D,IAAMC,EAAKlY,EAAWhL,OAAS,EAE/BgL,EAAWkY,GAASlY,EAAWkY,GAAIhY,MAAM,KAAK,GAA9C,IAAoDjR,OAEpD+Q,EAAWvG,KAAX,GAAmBxK,IAIdkpB,GAA2B,SAACnY,EAAYoY,EAAShoB,GAC1D,IAEMioB,EAAgB,GAChBC,EAAgB,GAJ6C,KAM9BT,GAAcznB,GAAM2nB,SANU,GAM5DQ,EAN4D,KAM9CC,EAN8C,KAanE,OALAzY,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMwpB,EAAgBL,EAAQnpB,GAC9BwpB,GAAiBF,GAAgBP,GAAmBK,EAAeppB,GAT5C,IAUtBwpB,GAAiBD,GAAgBR,GAAmBM,EAAerpB,GAT7C,MAWpB,CACH+Q,WAAYqY,EAAc/hB,KAAK,KAC/BoiB,iBAAkBJ,EAAchiB,KAAK,OAKhCqiB,GAA0B,SAAC3Y,EAAYoY,EAAShoB,EAAM0c,EAAcF,GAC7E,IAAIqL,EAAoB,GAClBW,EAAkB,GAClBC,EAAe,GAyBrB,OAvBA9Y,EAAmBC,GAAY,SAAC/Q,GAC5B,GAAImpB,EAAQnpB,GAAI,CACZ,IAAIie,EAAO,GAEP4L,EAAe,CAAEzf,KAAM,IAE3ByT,EAAa3Q,SAAQ,SAACgR,GAClB,IAAM/O,EAAOwO,EAAcO,GAAG7I,aAAalG,KAAKnP,GAChDie,EAAUA,EAAV,IAAkB9O,EAClB0a,EAAazf,KAAK8T,GAAK/O,UAGG7K,IAA1BqlB,EAAgB1L,KAChB0L,EAAgB1L,GAAQ,GACxB+K,EAAkB/K,IAAS,EAC3B2L,EAAa3L,GAAQ4L,GAGzBd,GAAmBY,EAAgB1L,GAAOje,EAAGgpB,EAAkB/K,IAC/D+K,EAAkB/K,GAAQje,MAI3B,CACH2pB,kBACAC,iBAKKE,GAAe,SAACC,EAAUC,EAAUzY,EAAQiX,EAAUyB,GAC/D,IAAI9L,EAAc,GACdC,EAAgB,kBAAMoK,EAAS9S,gBAC3BvU,EAASoQ,EAATpQ,KACF4P,EAAagZ,EAAShV,YACtB9H,EAAS8c,EAASG,uBAAuBjd,OACzCkd,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAChD8U,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAShD,OAAO8a,EAASlZ,GAPS,SAAAzK,GAAA,OAAS0jB,EAC9B5C,GAAqBna,EAAQkd,EAAqBC,EAAe9jB,GACjEA,EACA8X,EACAD,KAG0Chd,IAGrCkpB,GAAqB,SAACrC,GAC/B,IAAM+B,EAAW/B,EAAMsC,OAAM,GACvB9C,EAA
oBQ,EAAMkC,uBAShC,OARAH,EAASjP,eAAiB0M,EAAkBva,OAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK,KAG3EmgB,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KACnCia,EAAS1L,wBAAwBkM,wBAE1BR,GAGLS,GAAS,SAACtU,EAAK/G,EAAMsb,GAGvB,IAFA,IAAIlpB,EAAMkpB,EAAGvU,EAAK/G,EAAM,GAEfnP,EAAI,EAAG+M,EAAMmJ,EAAInQ,OAAQ/F,EAAI+M,EAAK/M,IACvCuB,EAASA,EAAT,IAAgBkpB,EAAGvU,EAAK/G,EAAMnP,GAElC,OAAOuB,GAGEmpB,GAAyB,SAAC1C,EAAO2C,GAA4B,IAAhBpZ,EAAgB,uDAAP,GAC3DqZ,EAAM,GACJ3C,EAAY1W,EAAO0W,WAAaxU,EAChCoX,EAAkBtZ,EAAOsZ,kBAAmB,EAC5CC,EAAcT,GAAmBrC,GACjC+C,EAAoBD,EAAYE,kBAKlCJ,EAHCD,EAAW5kB,OAGN4kB,EAAWpe,KAAI,SAAA0e,GAAA,OACbC,SACEjS,GAF0B4D,EAqCjCoO,GAnC2BE,UACpBC,EAAevO,EAAUmO,kBACzB/N,EAAavc,OAAO0J,KAAKyS,EAAUrI,gBAAgBvE,gBACpDiJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBM,EAAOpO,EAAWlX,OAClBulB,EAAUrO,EAAW1Q,KAAI,SAAAjM,GAAA,OAC3B8qB,EAAa9qB,GAAGgG,SACd8W,EAAW1c,OAAO0J,KAAKyS,EAAUrI,gBAAgB5E,cAClDsJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBQ,EAAc1O,EAAUrI,gBAAgB/E,YACxCN,EAAO8J,EAAQ9J,KACfkR,EAASjD,EAASrF,QAAO,SAACC,EAAKwT,GAEjC,OADAxT,EAAIwT,GAAKD,EAAYC,GAAGnL,SACjBrI,IACR,IACGyT,EAAY,GAElBP,EAAQ,SAAChV,EAAKwD,EAAKzB,GAAX,OAAmByB,EAAIxD,EAAI+B,KAC/BoT,GACAlc,EAAKjC,SAAQ,SAACwM,GACV,IAAMnY,EAAMipB,GAAOc,EAAS5R,EAAKwR,GACjCO,EAAUlqB,GAAO,KAIzB2pB,EAAQ,SAAChV,EAAKjJ,EAAQgL,GAAd,OAAsBhL,EAAOiJ,EAAI+B,IAAMwG,eACxCtP,EAAKpJ,OAAS,SAACkH,GAClB,IAAMye,GAAUL,GAAOI,EAAUjB,GAAOvN,EAAYhQ,EAAQie,IAE5D,OAAIL,EACOzN,EAASuO,OAAM,SAAAhc,GAAA,OAAS1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,IACxE1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,OAAO+b,EAErDA,GACP,kBAAM,GApCqB,IAAC7O,EAC5BqO,EACEjS,EACAmS,EACAnO,EAEAoO,EACAC,EAEAlO,EAEAmO,EACApc,EACAkR,EAIAoL,KAnBJ,CAAC,kBAAM,IAqDjB,OAVIxD,IAAcxU,EACEqX,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIe,OAAM,SAAAlB,GAAA,OAAMA,EAAGxd,QAAU,CACtE4e,WAAW,IAGCf,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIkB,MAAK,SAAArB,GAAA,OAAMA,EAAGxd,QAAU,CACrE4e,WAAW,KA+CVE,GAAuB,SAAChC,EAAUhZ,EAAYyX,EAAUwD,EAAchC,GAC/ED,EAAShV,YAAchE,EACvBgZ,EAAS1L,wBAAwBkM,wBACjC5B,GACIH,EACAuB,EACApX,EAAeC,OACd,CAAErB,OAAQya,GACThC,IA+
BGiC,GAAmB,SAACzD,EAAU0D,EAAW3a,EAAQ4a,GAC1D,IAAMC,EAAS5D,EAAS8B,MAAM/Y,EAAOsa,WACjCQ,EAAgBH,EAiBpB,OAhBI3a,EAAOpQ,OAAS8B,EAAcE,UAC9BkpB,EAAgBF,EAAUjT,QAAO,SAAAd,GAAA,OAA+C,IAAlC8T,EAAU3hB,QAAQ6N,OAIpEgU,EAAOtR,eAAiBuR,EAAchlB,KAAK,KAC3C+kB,EAAO/N,wBAAwBkM,wBAE/B5B,GACIH,EACA4D,EACAzZ,EAAeE,QACf,CAAEqZ,YAAW3a,SAAQ+a,gBAAiBD,GACtC,MAGGD,GAIEG,GAAmB,SAAC/D,EAAUgE,EAAcjb,EAAQ4a,GAAjC,OAC5BK,EAAajgB,KAAI,SAAAkgB,GAAA,OACbR,GAAiBzD,EAAUiE,EAAYlb,EAAQ4a,OAE1CO,GAAqB,SAAC7G,GAO/B,IALAA,EAAahY,EAAQ,GAAIgY,IACT7V,OACZ6V,EAAW7V,KAAOlN,EAAUE,YAG3B6iB,EAAW3G,QACZ,OAAQ2G,EAAW7V,MACnB,KAAKlN,EAAUC,QACX8iB,EAAW3G,QAAUtc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX6iB,EAAW3G,QAAU1c,EAAiBC,YAK9C,OAAOojB,GAcE8G,GAA4B,SAAA5c,GAAA,OAAUA,EAAOxD,KAAI,SAACsZ,GAG3D,OAd8B,SAACA,GAAe,IACtC7V,EAAwB6V,EAAxB7V,KAAMkP,EAAkB2G,EAAlB3G,QAAS3e,EAASslB,EAATtlB,KACvB,GAAIyP,IAASlN,EAAUE,WAAagN,IAASlN,EAAUC,QAKnD,MAAM,IAAI8R,MAAJ,wCAAkD7E,EAAlD,aAAmEzP,EAAnE,UAJN,IAAK2hB,GAAc1K,IAAI0H,GACnB,MAAM,IAAIrK,MAAJ,mDAA6DqK,EAA7D,aAAiF3e,EAAjF,UASdqsB,CADA/G,EAAa6G,GAAmB7G,IAEzBA,MAeEgH,GAAa,SAACC,EAAU3d,EAAMY,EAAQ9E,GAC/C8E,EAAS4c,GAA0B5c,GACnC9E,EAAUvK,OAAO8Q,OAAO9Q,OAAO8Q,OAAO,GAAIub,IAAgB9hB,GAC1D,IAAMic,EAAYC,GAAetmB,IAAIoK,EAAQiW,YAG7C,IAAKgG,EACD,MAAM,IAAIrS,MAAJ,mCAA6C5J,EAAQiW,WAArD,WAPiD,MAU3BgG,EAAU/B,QAAQhW,EAAMY,EAAQ9E,GAVL,UAUpDsX,EAVoD,KAU5CjN,EAV4C,MAZ/B,SAACvF,EAAQid,GACrCjd,EAAO7C,SAAQ,SAAC2Y,GACZ,IAAMoH,EAAcpH,EAAWqH,GAC/B,GAAKD,EAAL,CAEA,IAAMhV,EAAM+U,EAAWziB,QAAQsb,EAAWtlB,MAC1CysB,EAAW/U,GAAOgV,EAClBpH,EAAWtlB,KAAO0sB,SACXpH,EAAWqH,OAetBC,CAAiBpd,EAAQwS,GACzB,IAAMhT,EAAW4S,GAAa7M,EAAevF,EAAQwS,GAG/C6K,EAAY/d,EAAWC,gBAAgBC,EAAUtE,EAAQ1K,MAC/DusB,EAASO,mBAAqBD,EAG9BN,EAAS/X,YAAcO,EAAcvP,QAAUuP,EAAc,GAAGvP,OAAzC,MAAuDuP,EAAc,GAAGvP,OAAS,GAAM,GAG9G,IAAMunB,EAAe,GACbrgB,EAAWmgB,EAAXngB,OACFmd,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAC1Cgb,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAQtD,OAPAxE,EAAmBgc,EAAS/X,aAAa,SAAC/U,GACtCstB,EAAattB,GAAKonB,GAAqBna,EAAQkd,EAAqBC,EAAepqB,MAEvFotB,EAAUG,oBA
AsBD,EAEhCR,EAAShS,eAAkB/K,EAAOxD,KAAI,SAAA2R,GAAA,OAAKA,EAAE3d,QAAO8G,OACpDylB,EAASU,YAAcviB,EAAQiW,aAAe/e,EAAWI,KAAO2M,EAAiBC,GAAQlE,EAAQiW,WAC1F4L,GAGExU,GAAgB,SAACvI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOhK,SAAU/F,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHA,KAAMoP,EACNK,KAAMD,EAAO/P,GAAGkf,SAAWnP,EAAO/P,GAAGgQ,KACrC1J,MAAOtG,GAInB,OAAO,MA+BLytB,GAAgC,SAACxC,EAAWpO,GAC9C,IAAM6Q,EAAc7Q,EAAU8Q,iBAC1BC,EAAiB3C,EAerB,OAbAyC,EAAYxgB,SAAQ,SAAC2gB,GACjB,GAAKA,EAAL,CADgC,IAMjB,EANiB,EAhCF,SAACA,GACnC,IAAIC,EAAS,GACT7F,SAEJ,OADAA,EAAY4F,EAAWzF,IAEvB,KAAKzV,EAAeC,OAChBkb,EAAS,CAACD,EAAWvF,UACrB,MACJ,KAAK3V,EAAeE,QAChBib,EAAS,CAACD,EAAWxF,KAAKiE,iBAC1B,MACJ,KAAK3Z,EAAeO,KAChB4a,EAAS,CAACD,EAAWvF,UACrB,MACJ,KAAK3V,EAAeG,QAChBmV,EAAY,UACZ6F,EAAS,CAACD,EAAWxF,KAAK0F,cAAc9c,MAAM,KAAM4c,EAAWvF,UAC/D,MACJ,QACIL,EAAY,KAGhB,MAAO,CACHA,YACA6F,UAa8BE,CAAuBH,GAA7C5F,EALwB,EAKxBA,UAAW6F,EALa,EAKbA,OACnB,GAAI7F,EACA2F,GAAiB,EAAAA,GAAe3F,GAAf,WAA6B6F,GAA7B,QAAqC,CAClDjC,WAAW,UAKhB+B,GAWLK,GAAuB,SAAvBA,EAAwBpR,EAAWoO,GAA8C,IAAnC1Z,EAAmC,uDAA1B,GAAI2c,EAAsB,uDAAP,GACtEC,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEpD,GAAIvR,IAAcsR,EAAlB,CAIA,IAAME,GAAYD,EAAcroB,SAA+C,IAAtCqoB,EAAc7jB,QAAQsS,GAE/DwR,GAAaxR,EAAUyR,kBAAkBrD,EAAW1Z,GAEpD,IAAMgd,EAAW1R,EAAU2R,UAC3BD,EAASrhB,SAAQ,SAACuhB,GACd,IAAMb,EAAiBH,GAA8BxC,EAAWwD,GAChER,EAAqBQ,EAAOb,EAAgBrc,EAAQ2c,QAI/CQ,GAAsB,SAAC1G,GAChC,KAAOA,EAAM2G,SAAW3G,EAAMG,YAAYyG,MAAK,SAAAtuB,GAAA,OAAKA,EAAE8nB,KAAOzV,EAAeG,YACxEkV,EAAQA,EAAM2G,QAElB,OAAO3G,GAGE6G,GAAmB,SAAC7G,GAC7B,KAAOA,EAAM2G,SACT3G,EAAQA,EAAM2G,QAElB,OAAO3G,GAGE8G,GAAqB,SAAC9G,GAC/B,IADoD,IAAd+G,EAAc,uDAAP,GACtC/G,EAAM2G,SACTI,EAAKvkB,KAAKwd,GACVA,EAAQA,EAAM2G,QAElB,OAAOI,GAGEC,GAA2B,SAACC,EAAaC,EAAYC,EAAgB5d,GAC9E,IAAI+W,SACA2C,SACImE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bje,EAAOie,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB1d,EAAOme,WAC/BD,EAAY,CAAC,CACTnH,SAAU,KAEdA,EAAW,OACR,OACCqH,EAAkBjvB,OAAOkvB,OAAOR,EAAqBS,iBAC/B,IAAtBR,IACAM,EAAkBA,EAAgBzW,Q
AAO,SAAA5Y,GAAA,OAAKA,EAAEiR,OAAOge,WAAaD,MAGxE,IAAMQ,EAAmBH,EAAgBzW,QAlB5B,SAAC6W,GAEd,OADexe,EAAO4C,UAAa,kBAAM,IAC3B4b,EAAOxe,MAgBqChF,KAAI,SAAAyjB,GAAA,OAAUA,EAAOze,OAAO+W,YAEhF8F,EAAgB,GAEtB,IAA0B,IAAtBiB,EAA6B,CAC7B,IAAMY,EAAwBvvB,OAAOkvB,OAAOR,EAAqBS,gBAEjEI,EAAsB/iB,SAAQ,SAACgjB,GAC3B,IAAMC,EAAaD,EAAU3e,QACI,IAA7B4e,EAAWC,eAA2BD,EAAWH,SAAWze,EAAOye,QAC/DG,EAAWZ,WAAaD,IAC5BlB,EAAc5jB,KAAK0lB,EAAUlI,QAC7BM,EAAW2H,EAAsB/W,QAAO,SAAA5Y,GAAA,OAAKA,IAAM4vB,KAAW3jB,KAAI,SAAAjM,GAAA,OAAKA,EAAEiR,OAAO+W,aACvEviB,QAAU0pB,EAAUjlB,KAAK,CAC9B8d,WACA+H,OAAQH,EAAUlI,MAClB+G,KAAMD,GAAmBoB,EAAUlI,aAOnDM,GAAW,MAAGjD,OAAH,qBAAiByK,GAAjB,CAAmCb,KAAc/V,QAAO,SAAA5Y,GAAA,OAAW,OAANA,KACxEmvB,EAAUjlB,KAAK,CACX8d,WACA8F,wBAAmBA,EAAnB,GAAqC7c,EAAO6c,eAAiB,OAIrE,IAAMkC,EAAYpB,EAAWlH,MAEvBuI,EAAa7vB,OAAO8Q,OAAO,CAC7Bgf,kBAAmBvB,EACnBK,uBACD/d,GAEGkf,EAAmBvB,EAAWwB,aAChClB,GAA+BiB,IAC/BxF,EAAYP,GAAuB+F,EAAkBnI,EAAU,CAC3DuC,gBAAiB2E,IAErBvB,GAAqBwC,EAAkBxF,EAAWsF,IAGtDd,EAAUviB,SAAQ,SAACyjB,GACf,IAAMC,EAAmBlG,GAAuB4F,EAAWK,EAAIrI,UACzDyG,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EA3HO,SAAC5F,EAAW8D,GACjC,IAAK,IAAI/uB,EAAI,EAAG+M,EAAMgiB,EAAKhpB,OAAQ/F,EAAI+M,EAAK/M,IAAK,CAC7C,IAAMgoB,EAAQ+G,EAAK/uB,GACnBirB,EAAYwC,GAA8BxC,EAAWjD,GAEzD,OAAOiD,EAsHuB6F,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIN,OAAO/B,kBAAkBuC,EAAeN,QAE5CtC,GAAqBqC,EAAWM,EAAkBL,EAAY,CAC1DnC,cAAeuC,EAAIvC,cACnBD,mBAAoBqB,GAA+BiB,QAMtDO,GAA4B,SAAC5B,EAAsBF,EAAYC,GACxE,IAAM8B,EAAmB7B,EAAqB6B,iBAE9C,IAAK,IAAMjB,KAAUiB,EAAkB,CACnC,IACMd,EADYc,EAAiBjB,GACNze,OACvB+d,EAAsBH,EAAe5d,OAAOge,SAC5C2B,GAAwB/B,EAAeoB,WAAWW,uBACpD/B,EAAeoB,WAAWW,sBAAsBf,EAAYhB,EAAe5d,QAC/E,GAAI4e,EAAWZ,WAAaD,GAAuB4B,EAAuB,CACtE,IAAMC,EAAgBhB,EAAW7H,SACjC0G,GAAyBmC,EAAejC,EAAY,CAChDE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXa,MAKFiB,GAAqB,SAAChC,GAA6C,IAAvB7d,EAAuB,uDAAd,GAAIyW,EAAU,aACxEqJ,SACEC,EAAkB/f,EAAO+f,gBACzBhJ,EAAW/W,EAAO+W,SAClB/mB,EAASgQ,EAAOye,OAAhB,IAA0Bze,EAAOge,SAGnC8B,EADAC,EACkBlC,EAAqBS,eAErBT,EAAqB6B,iBAG1B,OAAb3I,SACO+I,EAAgB9vB,GAEvB8vB,EAAgB9vB,GAAO,CA
CnBymB,QACAzW,WAQCggB,GAAyB,SAACrF,EAAWC,EAAWqF,GACzD,IAAMC,EAAsBvF,EAAUnU,QAAO,SAACC,EAAKrI,GAM/C,MAL+B,WAA3BA,EAAM+hB,YAAYnxB,KAClByX,EAAIxN,KAAJ,MAAAwN,EAAA,GAAYmU,EAAUjT,QAAO,SAAAd,GAAA,OAA0C,IAA7BA,EAAUuZ,OAAOhiB,QACpDA,KAAS6hB,GAChBxZ,EAAIxN,KAAKmF,GAENqI,IACR,IACH,OAAO5K,MAAMC,KAAK,IAAI+S,IAAIqR,IAAsBllB,KAAI,SAAAoD,GAAA,OAASA,EAAMuQ,WAU1D3P,GAAwB,SAACZ,EAAO1O,GACzC,OAAI0O,EAAMkQ,aACClQ,EAAMkQ,cAANlQ,CAAqB1O,GAEzBA,G,0PCjII2wB,G,WA/hBX,c,4FAAwB,SACpB,IAAIC,SAEJztB,KAAKuqB,QAAU,KACfvqB,KAAK+jB,YAAc,GACnB/jB,KAAKskB,oBAAsB,GAC3BtkB,KAAKoqB,UAAY,GANG,2BAARV,EAAQ,qBAARA,EAAQ,gBAQE,IAAlBA,EAAO/nB,SAAkB8rB,EAAS/D,EAAO,cAAe8D,GAExDxtB,KAAK0W,eAAiB+W,EAAO/W,eAC7B1W,KAAK2Q,YAAc8c,EAAO9c,YAC1B3Q,KAAKopB,YAAcqE,EAAOrE,YAC1BppB,KAAKuqB,QAAUkD,EACfztB,KAAKipB,mBAAqBjpB,KAAKuqB,QAAQtB,mBACvCjpB,KAAK0tB,gBAAkBpjB,IACvBtK,KAAKia,wBAAwBkM,0BAE7BsC,GAAUA,cAACzoB,MAAX,OAAoB0pB,IACpB1pB,KAAK0tB,gBAAkB1tB,KAAKipB,mBAAmB9sB,KAC/C6D,KAAKia,wBAAwBkM,wBAC7BnmB,KAAK2tB,sBAAwB,CACzBlC,eAAgB,GAChBoB,iBAAkB,K,+CA0B1B,OAAO7sB,KAAKoQ,gBAAgBvH,OAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEyP,c,gCAY9C,OAAO3L,KAAK0tB,kB,sCAIZ,OAAO1tB,KAAK4tB,c,8CAMZ,OAFA5tB,KAAK4tB,YAAczK,GAAa,CAACnjB,KAAK2Q,YAAa3Q,KAAK0W,gBACnD1W,KAAK8lB,uBAAwB9lB,KAAK0tB,iBAChC1tB,O,6CAIP,OAAOA,KAAKipB,qB,2BAiCV4E,EAAU9d,GACZ,OAAOH,EAAa5P,KAAM6tB,EAAU9d,K,kCAuB3B8d,GACT,OAAOje,EAAa5P,KAAM6tB,EAAU3T,GAAkBla,KAAM6tB,IAAW,K,4BAqBpEC,GACH,OAAOxT,GAAMta,KAAM8tB,K,iCAoBXC,GACR,OAAO1X,GAAWrW,KAAM+tB,K,6BAkDpBnI,EAAUzY,GACd,IAAM6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAMf,OAJAta,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,IAC/BpQ,KAAOoQ,EAAOpQ,MAAQixB,EAAUjxB,KDmChB,SAACqnB,EAAUwB,EAAUgC,EAAcqG,GAC9D,IAAIC,EAAe,GAEbnxB,EAAS6qB,EAAT7qB,KAEAirB,EAAS5D,EAAS8B,MAAM+H,EAAYxG,WACpC0G,EAAmBzI,GACrBsC,EACApC,EACAgC,EACAxD,EACAU,IAEEL,EAAYD,GAAcznB,GAAM0nB,UAItC,OAFAkD,GAAqBK,EAAQmG,EAAiB1J,EAAU,IAAKL,EAAUwD,EAAchC,GAEjFnB,EAAU9iB,OAAS,GACnBusB,EAAe9J,EAAS8B,MAAM+H,EAAYxG,WAC1CE,GAAqBuG,EAAcC,EAAiB1J,EAAU,IAAKL,EAAUwD,EAAchC,GACpF,CAACoC,EAAQkG,IAGblG,ECvDIo
G,CACHpuB,KACA4lB,EACAzY,EAJgB,CAAEsa,UAAWta,EAAOsa,c,gCA4BxC,OAAQznB,KAAK2Q,YAAYhP,SAAW3B,KAAK0W,eAAe/U,S,8BAUnC,IAAlB8lB,IAAkB,yDACf9B,EAAW,IAAI3lB,KAAKstB,YAAYttB,MAMtC,OALIynB,EACA9B,EAAS0I,UAAUruB,MAEnB2lB,EAAS0I,UAAU,MAEhB1I,I,8BA8CFmC,EAAW3a,GAChB,IAAM6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAEfta,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,GACtC,IAAMigB,EAAcptB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKonB,GACtBrwB,EAASoQ,EAATpQ,KACFswB,EAAsBF,GAAuBrF,EAAWC,EAAWqF,GAErE3U,SAEA1b,IAAS8B,EAAcG,IASvByZ,EAAY,CARUoP,GAAiB7nB,KAAMqtB,EAAqB,CAC9DtwB,KAAM8B,EAAcC,OACpB2oB,UAAWta,EAAOsa,WACnBM,GACkBF,GAAiB7nB,KAAMqtB,EAAqB,CAC7DtwB,KAAM8B,EAAcE,QACpB0oB,UAAWta,EAAOsa,WACnBM,IAIHtP,EADsBoP,GAAiB7nB,KAAMqtB,EAAqBlgB,EAAQ4a,GAI9E,OAAOtP,I,wCAIP,OAAOzY,KAAKsuB,e,8CAWZ,OAPAtuB,KAAKsuB,aAAetuB,KAAK4tB,YAAY/kB,OAAO8K,QAAO,SAACC,EAAK2a,EAAU3yB,GAK/D,OAJAgY,EAAI2a,EAASpyB,QAAU,CACnB+F,MAAOtG,EACP4yB,IAAKD,EAAS5iB,UAEXiI,IACR,IACI5T,O,gCAWPA,KAAKuqB,SAAWvqB,KAAKuqB,QAAQkE,YAAYzuB,MACzCA,KAAKuqB,QAAU,KACfvqB,KAAKoqB,UAAUthB,SAAQ,SAACuhB,GACpBA,EAAME,QAAU,QAEpBvqB,KAAKoqB,UAAY,K,kCA6BRC,GACT,IAAIxW,EAAM7T,KAAKoqB,UAAUnV,WAAU,SAAAyZ,GAAA,OAAWA,IAAYrE,MACjD,IAATxW,GAAa7T,KAAKoqB,UAAU5iB,OAAOqM,EAAK,K,gCAQjC8a,GACP3uB,KAAKuqB,SAAWvqB,KAAKuqB,QAAQkE,YAAYzuB,MACzCA,KAAKuqB,QAAUoE,EACfA,GAAUA,EAAOvE,UAAUhkB,KAAKpG,Q,kCA4BhC,OAAOA,KAAKuqB,U,oCA6BZ,OAAOvqB,KAAKoqB,Y,uCA4BZ,OAAOpqB,KAAK+jB,c,+CA4BZ,OAAO/jB,KAAKskB,wB,uwBCwSL1mB,G,YAtxBX,aAAsB,O,4FAAA,oCAANsJ,EAAM,qBAANA,EAAM,sB,iKAAA,2EACTA,KADS,OAGlB,EAAK0nB,eAAiB,GAHJ,E,0WAgFb/nB,GAQLA,EAAUvK,OAAO8Q,OAAO,GAPL,CACfyhB,MAAO,MACPxsB,UAAW,KACXysB,SAAS,EACTC,cAAc,EACd/c,KAAM,IAE8BnL,GACxC,IAAMgC,EAAS7I,KAAK8lB,uBAAuBjd,OAErCmmB,EAAgBrZ,GAAY5Z,KAC9BiE,KACAA,KAAK8lB,uBAAuBjd,OAC5B7I,KAAK2Q,YACL9J,EAAQkoB,aAAelmB,EAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEC,UAAQ8G,OAASjD,KAAK0W,eAC/D7P,EAAQmL,KACR,CACI8D,WAA8B,WAAlBjP,EAAQgoB,MACpBhZ,SAAUhP,EAAQioB,UAI1B,IAAKjoB,EAAQxE,UACT,OAAO2sB,EAxBG,IA2BN3sB,EAAcwE,EAAdxE,UACA0I,EAAuBikB,EAAvBjkB,KAAMY,EAAiBqjB,EAAjBrjB,OAAQ8
J,EAASuZ,EAATvZ,KAChBwZ,EAAatjB,EAAOxD,KAAK,SAAA9E,GAAA,OAAKA,EAAElH,QAEhC+yB,EADgB5yB,OAAO0J,KAAK3D,GACAsR,QAAO,SAACC,EAAK5F,GAC3C,IAAM6F,EAAMob,EAAW9oB,QAAQ6H,GAI/B,OAHa,IAAT6F,GACAD,EAAIxN,KAAK,CAACyN,EAAKxR,EAAU2L,KAEtB4F,IACR,IAgCH,MA9BsB,WAAlB/M,EAAQgoB,MACRK,EAAYpmB,SAAQ,SAACqmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBpkB,EAAKqkB,GAAMtmB,SAAQ,SAACoK,EAAOoc,GACvBvkB,EAAKqkB,GAAME,GAAYD,EAAMtzB,UACzBmE,EACAgT,EACAuC,EAAK6Z,GACL3jB,EAAOyjB,UAKnBrkB,EAAKjC,SAAQ,SAACoK,EAAOoc,GACjBJ,EAAYpmB,SAAQ,SAACqmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBjc,EAAMkc,GAAQC,EAAMtzB,UAChBmE,EACAgT,EAAMkc,GACN3Z,EAAK6Z,GACL3jB,EAAOyjB,UAMhBJ,I,gCASP,IAAMriB,EAAa3M,KAAK2Q,YAClB4e,EAAM,GAER5iB,EAAWhL,QACMgL,EAAWE,MAAM,KAEzB/D,SAAQ,SAACuK,GAAQ,MACHA,EAAIxG,MAAM,KAAK1E,IAAIO,QADhB,UACjBsE,EADiB,KACVC,EADU,KAGtBA,OAAc/M,IAAR+M,EAAoBA,EAAMD,EAChCuiB,EAAInpB,KAAJ,MAAAmpB,EAAA,GAAYvmB,MAAMiE,EAAMD,EAAQ,GAAGwiB,OAAOrnB,KAAI,SAAC2R,EAAGjG,GAAJ,OAAY7G,EAAQ6G,UAI1E,OAAO0b,I,8BA0BFE,GAAwD,IAA7C/W,EAA6C,uDAAlC,GAAIvL,EAA8B,uDAArB,CAAEsa,WAAW,GAC/CkC,EAAgBA,GAAG8F,EAAUxsB,OAC/BymB,EAAS,CAAC1pB,KAAMyvB,EAAW/W,GACzBiB,EAAenB,gBAAWkR,GAgBhC,OAdAnF,GACIvkB,KACA2Z,EACApL,EAAeG,QACf,CAAE+gB,YAAW9F,gBAAe1Q,eAAgBV,GAAaU,kBACzDP,GAGAvL,EAAOsa,UACP9N,EAAa0U,UAAUruB,MAEvB2Z,EAAa0U,UAAU,MAGpB1U,I,2BAsDL5F,GAA+C,IAA/B5G,EAA+B,uDAAtB,CAAEsa,WAAW,GAClCxE,EAAUjjB,KAAK+mB,QAAQ,CACzB8H,MAAO,MACP7c,KAAM+B,IAEJoK,EAAS8E,EAAQtX,OAAOxD,KAAI,SAAAoD,GAAA,OAASA,EAAMpP,QAC3CuzB,EAAe,CAACvR,GAAQ8C,OAAOgC,EAAQlY,MAEvC4kB,EAAW,IAAI3vB,KAAKstB,YAAYoC,EAAczM,EAAQtX,OAAQ,CAAEmR,WAAY,WAgBlF,OAdAyH,GACIvkB,KACA2vB,EACAphB,EAAeO,KACf3B,EACA4G,GAGA5G,EAAOsa,UACPkI,EAAStB,UAAUruB,MAEnB2vB,EAAStB,UAAU,MAGhBsB,I,gCAwBA/jB,EAAM/E,GACb+E,EAAOA,GAAQ5L,KAAKopB,YACpBviB,EAAUvK,OAAO8Q,OAAO,GAAI,CAAE6U,eAAgB,KAAOpb,GAErD,IAAMgC,EAAS7I,KAAKoQ,gBAAgBvH,OAC9B+mB,EAAU/mB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAEnD,mBAC5B2e,EAAYD,EAAQ,GAAGjuB,OACzBmuB,SACAC,SACAC,SAEJ,GAAIpkB,IAAS7N,EAAWC,UAEpB,IADA8xB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CA
C3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIzM,EAAOmnB,GAAQ7zB,QAAUyzB,EAAQI,GAAQD,GAEjDD,EAAe1pB,KAAKkP,QAErB,GAAI1J,IAAS7N,EAAWE,QAAS,CAEpC,IADA6xB,EAAiB,CAACjnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK4D,EAAQob,iBACpD8N,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIlP,KAAKwpB,EAAQI,GAAQD,IAE7BD,EAAe1pB,KAAKkP,EAAIrS,KAAK4D,EAAQob,iBAEzC6N,EAAiBA,EAAe7sB,KAAK,UAClC,IAAI2I,IAAS7N,EAAWG,QAU3B,MAAM,IAAIuS,MAAJ,aAAuB7E,EAAvB,qBARN,IADAkkB,EAAiB,CAACjnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,WAC/B4zB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIlP,KAAKwpB,EAAQI,GAAQD,IAE7BD,EAAe1pB,KAAKkP,IAM5B,OAAOwa,I,+BAGDvkB,GACN,IAAMyI,EAAYzI,EAAMpP,OACxB6D,KAAK0W,gBAAL,IAA2B1C,EAC3B,IAAMoP,EAAoBpjB,KAAKipB,mBACzBgH,EAAqB7M,EAAkB+F,oBACvCjY,EAAgB3F,EAAM2F,gBACtB+R,EAAU1X,EAAM0F,aAAalG,KAEnC,GAAKqY,EAAkB/X,YAAYE,EAAMpP,QAKlC,CACH,IAAM4M,EAAaqa,EAAkBva,OAAOoM,WAAU,SAAAib,GAAA,OAAaA,EAAU/zB,SAAW6X,KACxFjL,GAAc,IAAMqa,EAAkBva,OAAOE,GAAcwC,QAN3D6X,EAAkBva,OAAOzC,KAAKmF,GAC9B0kB,EAAmBnnB,SAAQ,SAACV,EAAKxM,GAC7BwM,EAAImD,EAAMpP,QAAU,IAAI6P,EAAMkF,EAActV,GAAIqnB,EAAQrnB,GAAI2P,MAapE,OALA6X,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KAEnC1L,KAAKia,wBAAwBkM,wBACtBnmB,O,wCAuCQ2L,EAAQwkB,EAAYhjB,GAAQ,WAC3CxB,EAAS2c,GAAmB3c,GAC5BwB,EAAS7Q,OAAO8Q,OAAO,GAAI,CAAEqa,WAAW,EAAM2I,YAAY,GAASjjB,GAEnE,IAAM6Z,EAAehnB,KAAK4mB,kBACpByJ,EAAUF,EAAW/b,MAAM,EAAG+b,EAAWxuB,OAAS,GAClD2uB,EAAaH,EAAWA,EAAWxuB,OAAS,GAElD,GAAIqlB,EAAarb,EAAOxP,QAAUgR,EAAOijB,WACrC,MAAM,IAAI3f,MAAS9E,EAAOxP,KAApB,sCAGV,IAAMo0B,EAAkBF,EAAQloB,KAAI,SAACoD,GACjC,IAAMilB,EAAYxJ,EAAazb,GAC/B,IAAKilB,EAED,MAAM,IAAI/f,MAASlF,EAAb,gCAEV,OAAOilB,EAAUtuB,SAGfgkB,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAE1BgJ,EAAKvK,EAAM9V,gBAAgBvH,OAC3B6nB,EAAiBH,EAAgBpoB,KAAI,SAAA0L,GAAA,OAAO4c,EAAG5c,MAEjDkG,EAAc,GACdC,EAAgB,kBAAM,EAAK1I,gBAEzBqf,EAAiB,GACvBjkB,EAAmBwZ,EAAMvV,aAAa,SAAC/U,GACnC,IAAMg1B
,EAAaF,EAAevoB,KAAI,SAAAoD,GAAA,OAASA,EAAM0F,aAAalG,KAAKnP,MACvE+0B,EAAe/0B,GAAK00B,kBAAcM,GAAd,QAA0Bh1B,EAAGoe,EAAeD,QAhCzB,MAkC3BgE,GAAa,CAAC4S,GAAiB,CAAChlB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC,WA6C3C,OAVA2a,EAAM2K,SAAStlB,GAEfgZ,GACIvkB,KACAkmB,EACA3X,EAAeK,QACf,CAAEzB,OAAQxB,EAAQ9C,OAAQwnB,GAC1BC,GAGGpK,I,gCAWA2E,GAA2D,IAA9C1d,EAA8C,uDAArC,GAAI2jB,EAAiC,aAAjB3E,EAAiB,uDAAJ,GACxDe,EAAkB/f,EAAO+f,gBACzBhC,EAAsB/d,EAAOge,SAC7B4F,EAAU5jB,EAAO4jB,QACjB7E,EAAYzB,GAAiBzqB,MAC7BgrB,EAAuBkB,EAAUyB,sBACjCtB,EAAmB/B,GAAoBtqB,MACvC8qB,EAAa,CACfwB,aAAcD,EACdzI,MAAOsI,GAgBX,OAbA4E,GAAkB9D,GAAmBhC,EAAsB7d,EAAQnN,MACnE4qB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChF5uB,OAAO8Q,OAAO,CACV2jB,WACD5jB,IAEH+f,GACAN,GAA0B5B,EAAsBF,EAAY,CACxD3d,SACAgf,eAIDnsB,O,yBAUPgxB,EAAWpkB,GACX,OAAQokB,GACR,I5CplBmB,c4CqlBfhxB,KAAK4uB,eAAexoB,KAAKwG,GAG7B,OAAO5M,O,kCASEgxB,GACT,OAAQA,GACR,I5CnmBmB,c4ComBfhxB,KAAK4uB,eAAiB,GAI1B,OAAO5uB,O,wCAUQ6mB,EAAWkK,GAAS,WACf/wB,KAAK4uB,eACX9lB,SAAQ,SAAAud,GAAA,OAAMA,EAAGtqB,KAAK,EAAM8qB,EAAWkK,Q,0BA8CpDE,EAAkB9jB,GACnB,IAAM6Z,EAAehnB,KAAK4mB,kBAE1B,IAAKI,EAAaiK,GACd,MAAM,IAAIxgB,MAAJ,SAAmBwgB,EAAnB,kBAGV,IAAMC,EAAe/jB,EAAOhR,MAAW80B,EAAlB,UAErB,GAAIjK,EAAakK,GACb,MAAM,IAAIzgB,MAAJ,SAAmBygB,EAAnB,mBAGV,IAb2B,E7CvnB5B,SAAgCC,EAAcxkB,EAAYQ,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAASqjB,EAAmCjkB,EAAnCikB,UAAWtjB,EAAwBX,EAAxBW,QAASd,EAAeG,EAAfH,MAAOC,EAAQE,EAARF,IAD2B,EAEhDkkB,EAAalV,SAFmC,SAE9DoV,EAF8D,KAExDC,EAFwD,KAIhEvjB,IACDf,EAAmB,IAAVA,KAAiBA,GAASA,EAAQqkB,GAASA,EAAOrkB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMqkB,GAAUA,EAAO,EAAKrkB,EAErDmkB,IACAtjB,EAAU9J,KAAKutB,KAAKvtB,KAAKwtB,IAAIvkB,EAAMD,GAASokB,IAGhDrjB,EAAUF,EAAgBC,EAASd,EAAOC,IAG1Cc,EAAQ,GAAKsjB,GACbtjB,EAAQpG,QAAQ0pB,GAEhBtjB,EAAQA,EAAQpM,OAAS,IAAM2vB,GAC/BvjB,EAAQ3H,KAAKkrB,EAAO,GAIxB,IADA,IAAMpjB,EAAe,GACZtS,EAAI,EAAGA,EAAImS,EAAQpM,OAAS,EAAG/F,IACpCsS,EAAa9H,KAAK,CACd4G,MAAOe,EAAQnS,GACfqR,IAAKc,EAAQnS,EAAI,KAIzB,IAAM61B,EAAa,GAYnB,OAXA/kB,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMsX,EAAQie,EAAalgB,aAAalG,KAAKnP,GAC7C,GAAIsX,aAAiB
hG,EACjBukB,EAAWrrB,KAAK8M,OADpB,CAKA,IAAM1R,EAAQyM,EAAgBC,EAAcgF,GAC5Cue,EAAWrrB,KAAQ5E,EAAMwL,MAAzB,IAAkCxL,EAAMyL,SAGrC,CAAEwkB,aAAYpU,KAAMtP,G6C2lBM2jB,CADR1xB,KAAKoQ,gBAAgB/E,YAAY4lB,GACWjxB,KAAK2Q,YAAaxD,GAA3EskB,EAdmB,EAcnBA,WAAYpU,EAdO,EAcPA,KAEdsU,EAAW5T,GAAa,CAAC0T,GAAa,CACxC,CACIt1B,KAAM+0B,EACNtlB,KAAMlN,EAAUE,UAChBkc,QAAS1c,EAAiBG,OAC1B8e,SACA,CAAC6T,IAAe,GAElBhL,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAWhC,OAVAvB,EAAM2K,SAASc,GAEfpN,GACIvkB,KACAkmB,EACA3X,EAAeM,IACd,CAAEoiB,mBAAkB9jB,SAAQ+jB,gBAC5B,MAGEhL,I,qCA8BP,OAAO,IAAItoB,EAHEoC,KAAK4xB,UAAU7zB,EAAWC,WACxBgC,KAAK6xB,e,iCA+CZpY,EAAcL,EAAWjM,GACjC,IAAM6Z,EAAehnB,KAAK4mB,kBAE1BnN,EAAa3Q,SAAQ,SAACkL,GAClB,IAAKgT,EAAahT,GACd,MAAM,IAAIvD,MAAJ,SAAmBuD,EAAnB,mCAId,IAAMga,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAKf,OF3iBuB,SAACrD,EAAU3K,GAAiD,IAAnCL,EAAmC,uDAAvB,SAAAjY,GAAA,OAAOA,GAAKgM,EAAW,aAEnFsa,EACAta,EADAsa,UAEElO,EAAgB6K,EAAShU,gBAAgB/E,YAJwC,EASnFqa,GACAtB,EAAS8B,MAAMuB,GACfrO,EACAjM,EACAiX,GACA,sCAAIsF,EAAJ,qBAAIA,EAAJ,uBAAepE,GAAuBA,aAAIoE,EAA3B,QAAmCjQ,EAAcF,QAPhEgM,EAPmF,EAOnFA,gBACAC,EARmF,EAQnFA,aASEsM,EAAY,GAoBlB,OAnBAx1B,OAAO0J,KAAKuf,GAAiBvT,OAAOlJ,SAAQ,SAACzF,GACzC,GAAIkiB,EAAgBliB,GAAI,CACpB,IAAM2kB,EAAS5D,EAAS8B,MAAMuB,GACxBgC,EAAajE,EAAaniB,GAChC2kB,EAAOrX,YAAc4U,EAAgBliB,GAAGJ,KAAK,KAC7C+kB,EAAO/N,wBAAwBkM,wBAI3BsB,GACAlD,GAAmBH,EAAU4D,EAAQzZ,EAAeC,OAAQrB,GAHtC,SAAAtE,GAAA,OAAU4Q,EAAa8N,OAAM,SAAAzN,GAAA,OAAKjR,EAAOiR,GAAGO,gBAAkBoP,EAAWzjB,KAAK8T,SAKxGkO,EAAOjE,YAAYiE,EAAOjE,YAAYpiB,OAAS,GAAGsiB,KAAOuB,EAAaniB,GAEtEyuB,EAAU1rB,KAAK4hB,OAKhB8J,EEsgBIC,CAAgB/xB,KAAMyZ,EAAcL,EAF3CjM,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,M,sCAyCmB,IAA9C6kB,EAA8C,uDAA/B,GAAIC,EAA2B,uDAAZ,GAAI9kB,EAAQ,aACnD6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAET2F,EAAcptB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKonB,GACxB8E,EAA0B,CAAC,CAACD,IAalC,OAXA9kB,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,IACtC6kB,EAAeA,EAAarwB,OAASqwB,EAAe,CAAC,KAGxClpB,SAAQ,SAACqpB,EAAUv2B,GAC5Bs2B,EAAwBt2B,GAAKuxB,GAAuBA,GAADA,UAC3CgF,GADqB,GACRF,IACjBlK,EACAqF
,MAGDjF,GAAiBnoB,KAAMkyB,EAAyB/kB,EAAQ4a,M,kDApuBhC5a,GAC/B,OAAOD,EAAkBI,iBAAiBH,K,+BA7B1C,OAAOoL,K,iCAOP,OAAOwK,K,iCAOP,OAAOjF,O,GAnES0P,ICxCTvW,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKK,GAA0CF,GAA1CE,IAAKC,GAAqCH,GAArCG,IAAK0a,GAAgC7a,GAAhC6a,MAAOC,GAAyB9a,GAAzB8a,KAAMC,GAAmB/a,GAAnB+a,MAAYC,GAAOhb,GAAZib,ICyBjDC,GAAY,CACdC,QC2LmB,sCAAIC,EAAJ,qBAAIA,EAAJ,uBACnB,SAAC/b,GAAqC,IAAjCzJ,EAAiC,uDAAxB,CAAEsa,WAAW,GACnBmL,EAAYhc,EACZic,SACEvJ,EAAc,GA8BpB,OA5BAqJ,EAAW7pB,SAAQ,SAAC+a,GAChB+O,EAAY/O,EAAU+O,GACtBtJ,EAAYljB,KAAZ,MAAAkjB,EAAA,EAAoBsJ,EAAU7O,cACzB8O,IACDA,EAAaD,MAIjBC,GAAcA,IAAeD,GAC7BC,EAAWC,UAIfF,EAAUtO,oBAAsB,GAChCC,GACI3N,EACAgc,EACArkB,EAAeI,QACf,KACA2a,GAGAnc,EAAOsa,UACPmL,EAAUvE,UAAUzX,GAEpBgc,EAAUvE,UAAU,MAGjBuE,ID5NXG,ICyHe,sCAAI7rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGmc,IAAH,MAAAnc,EAAU1P,KDxH5CsgB,OC6BkB,sCAAItgB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4Q,OAAH,MAAA5Q,EAAa1P,KD5BlD8rB,QC4DmB,sCAAI9rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGoc,QAAH,MAAApc,EAAc1P,KD3DpDsR,QCmJmB,sCAAItR,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4B,QAAH,MAAA5B,EAAc1P,KDlJpD+rB,kBE1B6B,sCAAI/rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGqc,kBAAH,MAAArc,EAAwB1P,KF2BxE8K,KElBgB,sCAAI9K,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG5E,KAAH,MAAA4E,EAAW1P,KFmB9C0I,eACAyG,cACA6c,YGlCG,SAAsB1Y,EAAYC,GACrC,OAAO7K,EAAa4K,EAAYC,EAAYP,GAAkBM,EAAYC,IAAa,IHkCvFF,iBACAG,kBACAyY,clC3BG,SAAwB3Y,EAAYC,EAAY1K,GACnD,OAAOuK,GAAMC,GAAcC,EAAYC,EAAY1K,GAAW2K,GAAeF,EAAYC,EAAY1K,KkC2BrGuK,SACA5N,sBAGE0mB,G,KAAcA,QACpB92B,OAAO8Q,OAAOxP,GAAW,CACrB60B,aACAY,QACA9kB,iBACAzO,oBACA/B,aACAc,gBACAqO,oBACAkmB,WACA9U,iBACAgV,iBACDC,GAEY31B","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 
'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && 
typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a 
number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from 
'../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the 
subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import Field 
from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return 
numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of 
the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? 
new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from 
'../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends 
DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from './model/dataConverter';\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n *\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n 
converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let 
collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && 
generateRowDiffset(newRowDiffSet, i, lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new 
fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
`0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n 
return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = 
Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n 
nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied 
value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n 
this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. 
This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is 
filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n 
DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js index 6e3620e..96de471 100644 --- a/src/converter/dataConverterStore.js +++ b/src/converter/dataConverterStore.js @@ -1,8 +1,5 @@ import DataConverter from './model/dataConverter'; -import DSVStringConverter from './defaultConverters/dsvStringConverter'; -import JSONConverter from './defaultConverters/jsonConverter'; -import DSVArrayConverter from './defaultConverters/dsvArrayConverter'; -import AutoDataConverter from './defaultConverters/autoCoverter'; +import { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters'; class DataConverterStore { constructor() { @@ -20,7 +17,7 @@ class DataConverterStore { } /** - * + * Sets the given converters in the store and returns the store * @param {Array} converters : contains array of converter instance * @return { Map } */ @@ -30,7 +27,7 @@ class DataConverterStore { } /** - * + * Registers a Converter of type DataConverter * @param {DataConverter} converter : converter Instance * @returns self */ @@ -43,7 +40,7 @@ class DataConverterStore { } /** - * + * Rempves a converter from store * @param {DataConverter} converter : converter Instance * @returns self */ diff --git a/src/converter/defaultConverters/autoCoverter.js b/src/converter/defaultConverters/autoConverter.js similarity index 100% rename from src/converter/defaultConverters/autoCoverter.js rename to src/converter/defaultConverters/autoConverter.js diff --git 
a/src/converter/defaultConverters/index.js b/src/converter/defaultConverters/index.js new file mode 100644 index 0000000..fdcf21f --- /dev/null +++ b/src/converter/defaultConverters/index.js @@ -0,0 +1,4 @@ +export { default as DSVStringConverter } from './dsvStringConverter'; +export { default as JSONConverter } from './jsonConverter'; +export { default as DSVArrayConverter } from './dsvArrayConverter'; +export { default as AutoDataConverter } from './autoConverter'; diff --git a/src/fields/parsers/temporal-parser/index.js b/src/fields/parsers/temporal-parser/index.js index 77ef92c..81fef24 100644 --- a/src/fields/parsers/temporal-parser/index.js +++ b/src/fields/parsers/temporal-parser/index.js @@ -10,17 +10,6 @@ import InvalidAwareTypes from '../../../invalid-aware-types'; * @implements {FieldParser} */ export default class TemporalParser extends FieldParser { - /** - * Initialize a new instance. - * - * @public - * @param {Object} schema - The schema object for the corresponding field. - */ - // constructor (schema) { - // super(); - // this.schema = schema; - // this._dtf = new DateTimeFormatter(format); - // } /** * Parses a single value of a field and returns the millisecond value. 
From 969183bbcf6fbebe9726034fc3eb3d32a69a0b2b Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Mon, 21 Oct 2019 17:49:52 +0530 Subject: [PATCH 16/20] feature/MZ-25: Change propagation identifier structure - Accept dimensions in an array - Range of measures will be an array of array --- dist/datamodel.js | 8989 ++++++++++++++++++++++++++++++++++++++++- dist/datamodel.js.map | 2 +- src/helper.js | 64 +- src/index.spec.js | 48 +- 4 files changed, 9045 insertions(+), 58 deletions(-) diff --git a/dist/datamodel.js b/dist/datamodel.js index b7b037f..e80d5f7 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1,2 +1,8989 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var i=t[r]={i:r,l:!1,exports:{}};return e[r].call(i.exports,i,i.exports,n),i.l=!0,i.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var i in e)n.d(r,i,function(t){return e[t]}.bind(null,i));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports=JSON.parse('{"name":"datamodel","description":"Relational algebra compliant in-memory tabular data 
store","homepage":"https://github.com/chartshq/datamodel","version":"2.2.1-alpha","license":"MIT","main":"dist/datamodel.js","keywords":["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],"author":"Muzejs.org (https://muzejs.org/)","repository":{"type":"git","url":"https://github.com/chartshq/datamodel.git"},"contributors":[{"name":"Akash Goswami","email":"akashgoswami90s@gmail.com"},{"name":"Subhash Haldar"},{"name":"Rousan Ali","email":"rousanali786@gmail.com","url":"https://rousan.io"},{"name":"Ujjal Kumar Dutta","email":"duttaujjalkumar@live.com"}],"dependencies":{"d3-dsv":"^1.0.8"},"devDependencies":{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0","chai":"3.5.0","cross-env":"^5.0.5","eslint":"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0","jsdoc":"3.5.5","json2yaml":"^1.1.0","karma":"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3","marked":"^0.5.0","mocha":"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0","webpack":"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},"scripts":{"test":"npm run lint && npm run ut","ut":"karma start karma.conf.js","utd":"karma start --single-run false --browsers Chrome karma.conf.js ","build":"npm run build:prod","build:dev":"webpack --mode development","build:prod":"webpack --mode production","start":"webpack-dev-server --config webpack.config.dev.js --mode development --open","lint":"eslint ./src","lint-errors":"eslint --quiet ./src","docs":"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}')},function(e,t,n){var 
r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",(function(){return o})),n.d(r,"DimensionSubtype",(function(){return u})),n.d(r,"MeasureSubtype",(function(){return c})),n.d(r,"FieldType",(function(){return f})),n.d(r,"FilteringMode",(function(){return l})),n.d(r,"GROUP_BY_FUNCTIONS",(function(){return s}));var i={};n.r(i),n.d(i,"Dimension",(function(){return Ce})),n.d(i,"Measure",(function(){return Me})),n.d(i,"FieldParser",(function(){return Ue})),n.d(i,"fieldRegistry",(function(){return it})),n.d(i,"columnMajor",(function(){return v}));var a={};n.r(a),n.d(a,"sum",(function(){return jn})),n.d(a,"avg",(function(){return An})),n.d(a,"min",(function(){return kn})),n.d(a,"max",(function(){return Dn})),n.d(a,"first",(function(){return Sn})),n.d(a,"last",(function(){return Fn})),n.d(a,"count",(function(){return Tn})),n.d(a,"sd",(function(){return Nn}));var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function p(e){return e instanceof Date?e:new Date(e)}function d(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,i=void 0;if(!n)return t;var a=n.toLowerCase();for(i=0,r=e.length;ii.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return 
t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:L.CROSS,a=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var 
y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};y.forEach((function(e,t){w[a[t].name]=e})),p&&L.CROSS!==i?o[h]=w:(o.push(w),p=!0,h=n)}else if((i===L.LEFTOUTER||i===L.RIGHTOUTER)&&!p){var _={},O=c.fields.length-1;y.forEach((function(e,t){_[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(_)}}))})),new En(o,a,{name:p})}function G(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:G;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return 
t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=un(t,r))&&("function"==typeof i?z(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=q(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return un(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(a,i);z(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var Q,ee=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=K(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&$(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!un(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);$(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return un(t,e)}));var i=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Oe.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof 
t[e]&&(t[e]=r[e].defAggFn());var a=Oe.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=be)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new Cn(h,p,{name:c}),v}function Ae(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new Cn(a,r,{name:l})}function De(e,t,n){return J(e,t,n,!1,L.LEFTOUTER)}function Se(e,t,n){return J(t,e,n,!1,L.RIGHTOUTER)}var Fe=function(){function e(e,t){for(var n=0;nn&&(n=i))})),[t,n]}}],[{key:"parser",value:function(){return new $e}}]),t}(Me),tt=function(){function e(e,t){for(var 
n=0;n9999?"+"+yt(t,6):yt(t,4))+"-"+yt(e.getUTCMonth()+1,2)+"-"+yt(e.getUTCDate(),2)+(a?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"."+yt(a,3)+"Z":i?"T"+yt(n,2)+":"+yt(r,2)+":"+yt(i,2)+"Z":r||n?"T"+yt(n,2)+":"+yt(r,2)+"Z":"")}var bt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return lt;if(f)return f=!1,ft;var t,r,i=o;if(e.charCodeAt(i)===st){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===pt?f=!0:r===dt&&(f=!0,e.charCodeAt(o)===pt&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o0&&void 0!==arguments[0]?arguments[0]:[];return t.forEach((function(t){return e.store.set(t.type,t)})),this.store}},{key:"register",value:function(e){return e instanceof ct?(this.store.set(e.type,e),this):null}},{key:"unregister",value:function(e){return this.store.delete(e.type),this}},{key:"get",value:function(e){return this.store.has(e)?this.store.get(e):null}}]),e}(),Mt=function(){var e=null;return e||(e=new It)}(),xt=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function Ut(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Lt(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===U.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Lt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Jt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Lt(e._ancestorDerivation).concat(Lt(e._derivation)))},Gt=function(e,t,n){var r=arguments.length>3&&void 
0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Jt(e,t)},zt=(Ut(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ut(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ut(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=xt(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return i(c,(function(e){return t(Vt(f,l,s,e),e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n){for(var r=n(e,t,0),i=1,a=e.length;i2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||V,a=n.filterByMeasure||!1,o=Zt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),i=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return i[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),p=r.data,d=l.reduce((function(e,t){return 
e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&p.forEach((function(e){var t=$t(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},p.length?function(e){var t=!c||h[$t(o,e,n)];return a?l.every((function(t){return e[t].internalValue>=d[t][0]&&e[t].internalValue<=d[t][1]}))&&t:t}:function(){return!1};var t,n,r,i,o,c,f,l,s,p,d,h})):[function(){return!1}];return i===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},en=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Gt(n,e,U.SELECT,{config:r},i)},tn=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Gt(e,i,U.PROJECT,{projField:t,config:n,actualProjField:a},null),i},nn=function(e,t,n,r){return t.map((function(t){return tn(e,t,n,r)}))},rn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},an=function(e){return e.map((function(e){return function(e){var t=e.type,n=e.subtype,r=e.name;if(t!==f.DIMENSION&&t!==f.MEASURE)throw new Error("DataModel doesn't support field type "+t+" used for "+r+" field");if(!it.has(n))throw new Error("DataModel doesn't support measure field subtype "+n+" used for "+r+" field")}(e=rn(e)),e}))},on=function(e,t,n,r){n=an(n),r=Object.assign(Object.assign({},ot),r);var i=Mt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=xt(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var 
l=at(f,n,c),s=S.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=Vt(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},un=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.nonTraversingModel,o=i.excludeModels||[];if(t!==a){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=cn(n,t);e(t,a,r,i)}))}},ln=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==U.GROUPBY}));)e=e._parent;return e},sn=function(e){for(;e._parent;)e=e._parent;return e},pn=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},dn=function(e,t,n,r){var i=void 0,a=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],i=[];else{var s,p=Object.values(o.mutableActions);!1!==u&&(p=p.filter((function(e){return e.config.sourceId!==c})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:i,models:e.model,path:pn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Lt(d),[e])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Lt(r.excludeModels||[]))})}var 
y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),b=t.groupByModel;f&&b&&(a=Qt(b,i,{filterByMeasure:f}),fn(b,a,m)),l.forEach((function(e){var t=Qt(y,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},yn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Lt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},mn=function(e,t){return e.numberFormat?e.numberFormat()(t):t},bn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),en(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=yn(e,i,r),u=void 0;a===l.ALL?u=[tn(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),tn(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=tn(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return 
t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),wn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},_n=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=je.apply(void 0,i);return Gt(this,a,U.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Oe.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Gt(this,a,U.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new T(i[n],a[n],e)}));return 
n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=rn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,On(t).concat([e,p,s]))}));var h=at([d],[e],[e.name]),v=wn(h,1)[0];return c.addField(v),Gt(this,c,U.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=sn(this),c=u._propagationNameSpace,f=ln(this),l={groupByModel:f,model:u};return n&&vn(c,t,this),dn(e,l,{propagationNameSpace:c,sourceId:a},Object.assign({payload:o},t)),i&&hn(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var 
r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=yn([].concat(On(e),On(t)),a,i)})),nn(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Oe}},{key:"Converters",get:function(){return Mt}},{key:"FieldTypes",get:function(){return it}}]),t}(gn),jn=me.sum,An=me.avg,kn=me.min,Dn=me.max,Sn=me.first,Fn=me.last,Tn=me.count,Nn=me.std,Rn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return t.forEach((function(e){r=e(r),a.push.apply(a,B(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Gt(e,r,U.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n 9999 ? "+" + pad(year, 6) : pad(year, 4); +} + +function formatDate(date) { + var hours = date.getUTCHours(), + minutes = date.getUTCMinutes(), + seconds = date.getUTCSeconds(), + milliseconds = date.getUTCMilliseconds(); + return isNaN(date) ? "Invalid Date" : formatYear(date.getUTCFullYear(), 4) + "-" + pad(date.getUTCMonth() + 1, 2) + "-" + pad(date.getUTCDate(), 2) + (milliseconds ? "T" + pad(hours, 2) + ":" + pad(minutes, 2) + ":" + pad(seconds, 2) + "." + pad(milliseconds, 3) + "Z" : seconds ? 
"T" + pad(hours, 2) + ":" + pad(minutes, 2) + ":" + pad(seconds, 2) + "Z" : minutes || hours ? "T" + pad(hours, 2) + ":" + pad(minutes, 2) + "Z" : ""); +} + +/* harmony default export */ __webpack_exports__["default"] = (function (delimiter) { + var reFormat = new RegExp("[\"" + delimiter + "\n\r]"), + DELIMITER = delimiter.charCodeAt(0); + + function parse(text, f) { + var convert, + columns, + rows = parseRows(text, function (row, i) { + if (convert) return convert(row, i - 1); + columns = row, convert = f ? customConverter(row, f) : objectConverter(row); + }); + rows.columns = columns || []; + return rows; + } + + function parseRows(text, f) { + var rows = [], + // output rows + N = text.length, + I = 0, + // current character index + n = 0, + // current line number + t, + // current token + eof = N <= 0, + // current token followed by EOF? + eol = false; // current token followed by EOL? + + // Strip the trailing newline. + if (text.charCodeAt(N - 1) === NEWLINE) --N; + if (text.charCodeAt(N - 1) === RETURN) --N; + + function token() { + if (eof) return EOF; + if (eol) return eol = false, EOL; + + // Unescape quotes. + var i, + j = I, + c; + if (text.charCodeAt(j) === QUOTE) { + while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE) {} + if ((i = I) >= N) eof = true;else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;else if (c === RETURN) { + eol = true;if (text.charCodeAt(I) === NEWLINE) ++I; + } + return text.slice(j + 1, i - 1).replace(/""/g, "\""); + } + + // Find next delimiter or newline. + while (I < N) { + if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;else if (c === RETURN) { + eol = true;if (text.charCodeAt(I) === NEWLINE) ++I; + } else if (c !== DELIMITER) continue; + return text.slice(j, i); + } + + // Return last token before EOF. 
+ return eof = true, text.slice(j, N); + } + + while ((t = token()) !== EOF) { + var row = []; + while (t !== EOL && t !== EOF) { + row.push(t), t = token(); + }if (f && (row = f(row, n++)) == null) continue; + rows.push(row); + } + + return rows; + } + + function preformatBody(rows, columns) { + return rows.map(function (row) { + return columns.map(function (column) { + return formatValue(row[column]); + }).join(delimiter); + }); + } + + function format(rows, columns) { + if (columns == null) columns = inferColumns(rows); + return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join("\n"); + } + + function formatBody(rows, columns) { + if (columns == null) columns = inferColumns(rows); + return preformatBody(rows, columns).join("\n"); + } + + function formatRows(rows) { + return rows.map(formatRow).join("\n"); + } + + function formatRow(row) { + return row.map(formatValue).join(delimiter); + } + + function formatValue(value) { + return value == null ? "" : value instanceof Date ? formatDate(value) : reFormat.test(value += "") ? "\"" + value.replace(/"/g, "\"\"") + "\"" : value; + } + + return { + parse: parse, + parseRows: parseRows, + format: format, + formatBody: formatBody, + formatRows: formatRows + }; +}); + +/***/ }), + +/***/ "./node_modules/d3-dsv/src/index.js": +/*!******************************************!*\ + !*** ./node_modules/d3-dsv/src/index.js ***! + \******************************************/ +/*! exports provided: dsvFormat, csvParse, csvParseRows, csvFormat, csvFormatBody, csvFormatRows, tsvParse, tsvParseRows, tsvFormat, tsvFormatBody, tsvFormatRows, autoType */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _dsv__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./dsv */ "./node_modules/d3-dsv/src/dsv.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "dsvFormat", function() { return _dsv__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _csv__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./csv */ "./node_modules/d3-dsv/src/csv.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "csvParse", function() { return _csv__WEBPACK_IMPORTED_MODULE_1__["csvParse"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "csvParseRows", function() { return _csv__WEBPACK_IMPORTED_MODULE_1__["csvParseRows"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "csvFormat", function() { return _csv__WEBPACK_IMPORTED_MODULE_1__["csvFormat"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "csvFormatBody", function() { return _csv__WEBPACK_IMPORTED_MODULE_1__["csvFormatBody"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "csvFormatRows", function() { return _csv__WEBPACK_IMPORTED_MODULE_1__["csvFormatRows"]; }); + +/* harmony import */ var _tsv__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
./tsv */ "./node_modules/d3-dsv/src/tsv.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "tsvParse", function() { return _tsv__WEBPACK_IMPORTED_MODULE_2__["tsvParse"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "tsvParseRows", function() { return _tsv__WEBPACK_IMPORTED_MODULE_2__["tsvParseRows"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "tsvFormat", function() { return _tsv__WEBPACK_IMPORTED_MODULE_2__["tsvFormat"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "tsvFormatBody", function() { return _tsv__WEBPACK_IMPORTED_MODULE_2__["tsvFormatBody"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "tsvFormatRows", function() { return _tsv__WEBPACK_IMPORTED_MODULE_2__["tsvFormatRows"]; }); + +/* harmony import */ var _autoType__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./autoType */ "./node_modules/d3-dsv/src/autoType.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "autoType", function() { return _autoType__WEBPACK_IMPORTED_MODULE_3__["default"]; }); + + + + + + +/***/ }), + +/***/ "./node_modules/d3-dsv/src/tsv.js": +/*!****************************************!*\ + !*** ./node_modules/d3-dsv/src/tsv.js ***! + \****************************************/ +/*! 
exports provided: tsvParse, tsvParseRows, tsvFormat, tsvFormatBody, tsvFormatRows */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tsvParse", function() { return tsvParse; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tsvParseRows", function() { return tsvParseRows; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tsvFormat", function() { return tsvFormat; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tsvFormatBody", function() { return tsvFormatBody; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "tsvFormatRows", function() { return tsvFormatRows; }); +/* harmony import */ var _dsv__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./dsv */ "./node_modules/d3-dsv/src/dsv.js"); + + +var tsv = Object(_dsv__WEBPACK_IMPORTED_MODULE_0__["default"])("\t"); + +var tsvParse = tsv.parse; +var tsvParseRows = tsv.parseRows; +var tsvFormat = tsv.format; +var tsvFormatBody = tsv.formatBody; +var tsvFormatRows = tsv.formatRows; + +/***/ }), + +/***/ "./package.json": +/*!**********************!*\ + !*** ./package.json ***! + \**********************/ +/*! 
exports provided: name, description, homepage, version, license, main, keywords, author, repository, contributors, dependencies, devDependencies, scripts, default */ +/***/ (function(module) { + +module.exports = JSON.parse("{\"name\":\"datamodel\",\"description\":\"Relational algebra compliant in-memory tabular data store\",\"homepage\":\"https://github.com/chartshq/datamodel\",\"version\":\"2.2.1-alpha\",\"license\":\"MIT\",\"main\":\"dist/datamodel.js\",\"keywords\":[\"datamodel\",\"data\",\"relational\",\"algebra\",\"model\",\"muze\",\"fusioncharts\",\"table\",\"tabular\",\"operation\"],\"author\":\"Muzejs.org (https://muzejs.org/)\",\"repository\":{\"type\":\"git\",\"url\":\"https://github.com/chartshq/datamodel.git\"},\"contributors\":[{\"name\":\"Akash Goswami\",\"email\":\"akashgoswami90s@gmail.com\"},{\"name\":\"Subhash Haldar\"},{\"name\":\"Rousan Ali\",\"email\":\"rousanali786@gmail.com\",\"url\":\"https://rousan.io\"},{\"name\":\"Ujjal Kumar Dutta\",\"email\":\"duttaujjalkumar@live.com\"}],\"dependencies\":{\"d3-dsv\":\"^1.0.8\"},\"devDependencies\":{\"babel-cli\":\"6.26.0\",\"babel-core\":\"^6.26.3\",\"babel-eslint\":\"6.1.2\",\"babel-loader\":\"^7.1.4\",\"babel-plugin-transform-runtime\":\"^6.23.0\",\"babel-preset-env\":\"^1.7.0\",\"babel-preset-es2015\":\"^6.24.1\",\"babel-preset-flow\":\"^6.23.0\",\"chai\":\"3.5.0\",\"cross-env\":\"^5.0.5\",\"eslint\":\"3.19.0\",\"eslint-config-airbnb\":\"15.1.0\",\"eslint-plugin-import\":\"2.7.0\",\"eslint-plugin-jsx-a11y\":\"5.1.1\",\"eslint-plugin-react\":\"7.3.0\",\"istanbul-instrumenter-loader\":\"^3.0.0\",\"jsdoc\":\"3.5.5\",\"json2yaml\":\"^1.1.0\",\"karma\":\"1.7.1\",\"karma-chai\":\"0.1.0\",\"karma-chrome-launcher\":\"2.1.1\",\"karma-coverage-istanbul-reporter\":\"^1.3.0\",\"karma-mocha\":\"1.3.0\",\"karma-spec-reporter\":\"0.0.31\",\"karma-webpack\":\"2.0.3\",\"marked\":\"^0.5.0\",\"mocha\":\"3.4.2\",\"mocha-webpack\":\"0.7.0\",\"transform-runtime\":\"0.0.0\",\"webpack\":\"^4.12.0\",\"webpack-cli\":\"^3.0.7
\",\"webpack-dev-server\":\"^3.1.4\"},\"scripts\":{\"test\":\"npm run lint && npm run ut\",\"ut\":\"karma start karma.conf.js\",\"utd\":\"karma start --single-run false --browsers Chrome karma.conf.js \",\"build\":\"npm run build:prod\",\"build:dev\":\"webpack --mode development\",\"build:prod\":\"webpack --mode production\",\"start\":\"webpack-dev-server --config webpack.config.dev.js --mode development --open\",\"lint\":\"eslint ./src\",\"lint-errors\":\"eslint --quiet ./src\",\"docs\":\"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json\"}}"); + +/***/ }), + +/***/ "./src/constants/index.js": +/*!********************************!*\ + !*** ./src/constants/index.js ***! + \********************************/ +/*! exports provided: DataFormat, FilteringMode, PROPAGATION, ROW_ID, DM_DERIVATIVES, JOINS, LOGICAL_OPERATORS */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "PROPAGATION", function() { return PROPAGATION; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ROW_ID", function() { return ROW_ID; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "DM_DERIVATIVES", function() { return DM_DERIVATIVES; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "JOINS", function() { return JOINS; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "LOGICAL_OPERATORS", function() { return LOGICAL_OPERATORS; }); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../enums */ "./src/enums/index.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DataFormat", function() { return _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "FilteringMode", function() { return _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"]; }); + + +/** + * The event name for data propagation. + */ +var PROPAGATION = 'propagation'; + +/** + * The name of the unique row id column in DataModel. + */ +var ROW_ID = '__id__'; + +/** + * The enums for operation names performed on DataModel. + */ +var DM_DERIVATIVES = { + SELECT: 'select', + PROJECT: 'project', + GROUPBY: 'group', + COMPOSE: 'compose', + CAL_VAR: 'calculatedVariable', + BIN: 'bin', + SORT: 'sort' +}; + +var JOINS = { + CROSS: 'cross', + LEFTOUTER: 'leftOuter', + RIGHTOUTER: 'rightOuter', + NATURAL: 'natural', + FULLOUTER: 'fullOuter' +}; + +var LOGICAL_OPERATORS = { + AND: 'and', + OR: 'or' +}; + +/***/ }), + +/***/ "./src/converter/dataConverterStore.js": +/*!*********************************************!*\ + !*** ./src/converter/dataConverterStore.js ***! + \*********************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony import */ var _defaultConverters__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
./defaultConverters */ "./src/converter/defaultConverters/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + +var DataConverterStore = function () { + function DataConverterStore() { + _classCallCheck(this, DataConverterStore); + + this.store = new Map(); + this.converters(this._getDefaultConverters()); + } + + _createClass(DataConverterStore, [{ + key: '_getDefaultConverters', + value: function _getDefaultConverters() { + return [new _defaultConverters__WEBPACK_IMPORTED_MODULE_1__["DSVStringConverter"](), new _defaultConverters__WEBPACK_IMPORTED_MODULE_1__["DSVArrayConverter"](), new _defaultConverters__WEBPACK_IMPORTED_MODULE_1__["JSONConverter"](), new _defaultConverters__WEBPACK_IMPORTED_MODULE_1__["AutoDataConverter"]()]; + } + + /** + * Sets the given converters in the store and returns the store + * @param {Array} converters : contains array of converter instance + * @return { Map } + */ + + }, { + key: 'converters', + value: function converters() { + var _this = this; + + var _converters = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : []; + + _converters.forEach(function (converter) { + return _this.store.set(converter.type, converter); + }); + return this.store; + } + + /** + * Registers a Converter of type DataConverter + * @param {DataConverter} converter : converter Instance + * @returns self + */ + + }, { + key: 'register', + value: function register(converter) { + if (converter instanceof _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__["default"]) { + this.store.set(converter.type, converter); + return this; + } + return null; + } + + /** + * Rempves a converter from store + * @param {DataConverter} converter : converter Instance + * @returns self + */ + + }, { + key: 'unregister', + value: function unregister(converter) { + this.store.delete(converter.type); + return this; + } + }, { + key: 'get', + value: function get(name) { + if (this.store.has(name)) { + return this.store.get(name); + } + return null; + } + }]); + + return DataConverterStore; +}(); + +var converterStore = function () { + var store = null; + + function getStore() { + store = new DataConverterStore(); + return store; + } + return store || getStore(); +}(); + +/* harmony default export */ __webpack_exports__["default"] = (converterStore); + +/***/ }), + +/***/ "./src/converter/defaultConverters/autoConverter.js": +/*!**********************************************************!*\ + !*** ./src/converter/defaultConverters/autoConverter.js ***! + \**********************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony import */ var _utils_auto_resolver__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
../utils/auto-resolver */ "./src/converter/utils/auto-resolver.js"); +/* harmony import */ var _enums_data_format__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../enums/data-format */ "./src/enums/data-format.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +var AutoDataConverter = function (_DataConverter) { + _inherits(AutoDataConverter, _DataConverter); + + function AutoDataConverter() { + _classCallCheck(this, AutoDataConverter); + + return _possibleConstructorReturn(this, (AutoDataConverter.__proto__ || Object.getPrototypeOf(AutoDataConverter)).call(this, _enums_data_format__WEBPACK_IMPORTED_MODULE_2__["default"].AUTO)); + } + + _createClass(AutoDataConverter, [{ + key: 'convert', + value: function convert(data, schema, options) { + return Object(_utils_auto_resolver__WEBPACK_IMPORTED_MODULE_1__["default"])(data, schema, options); + } + }]); + + return AutoDataConverter; +}(_model_dataConverter__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (AutoDataConverter); + +/***/ }), + +/***/ "./src/converter/defaultConverters/dsvArrayConverter.js": +/*!**************************************************************!*\ + !*** ./src/converter/defaultConverters/dsvArrayConverter.js ***! + \**************************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony import */ var _utils_dsv_arr__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/dsv-arr */ "./src/converter/utils/dsv-arr.js"); +/* harmony import */ var _enums_data_format__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../../enums/data-format */ "./src/enums/data-format.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +var DSVArrayConverter = function (_DataConverter) { + _inherits(DSVArrayConverter, _DataConverter); + + function DSVArrayConverter() { + _classCallCheck(this, DSVArrayConverter); + + return _possibleConstructorReturn(this, (DSVArrayConverter.__proto__ || Object.getPrototypeOf(DSVArrayConverter)).call(this, _enums_data_format__WEBPACK_IMPORTED_MODULE_2__["default"].DSV_ARR)); + } + + _createClass(DSVArrayConverter, [{ + key: 'convert', + value: function convert(data, schema, options) { + return Object(_utils_dsv_arr__WEBPACK_IMPORTED_MODULE_1__["default"])(data, schema, options); + } + }]); + + return DSVArrayConverter; +}(_model_dataConverter__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (DSVArrayConverter); + +/***/ }), + +/***/ "./src/converter/defaultConverters/dsvStringConverter.js": +/*!***************************************************************!*\ + !*** ./src/converter/defaultConverters/dsvStringConverter.js ***! + \***************************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony import */ var _utils_dsv_str__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/dsv-str */ "./src/converter/utils/dsv-str.js"); +/* harmony import */ var _enums_data_format__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../../enums/data-format */ "./src/enums/data-format.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +var DSVStringConverter = function (_DataConverter) { + _inherits(DSVStringConverter, _DataConverter); + + function DSVStringConverter() { + _classCallCheck(this, DSVStringConverter); + + return _possibleConstructorReturn(this, (DSVStringConverter.__proto__ || Object.getPrototypeOf(DSVStringConverter)).call(this, _enums_data_format__WEBPACK_IMPORTED_MODULE_2__["default"].DSV_STR)); + } + + _createClass(DSVStringConverter, [{ + key: 'convert', + value: function convert(data, schema, options) { + return Object(_utils_dsv_str__WEBPACK_IMPORTED_MODULE_1__["default"])(data, schema, options); + } + }]); + + return DSVStringConverter; +}(_model_dataConverter__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (DSVStringConverter); + +/***/ }), + +/***/ "./src/converter/defaultConverters/index.js": +/*!**************************************************!*\ + !*** ./src/converter/defaultConverters/index.js ***! + \**************************************************/ +/*! exports provided: DSVStringConverter, JSONConverter, DSVArrayConverter, AutoDataConverter */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _dsvStringConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./dsvStringConverter */ "./src/converter/defaultConverters/dsvStringConverter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DSVStringConverter", function() { return _dsvStringConverter__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _jsonConverter__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
./jsonConverter */ "./src/converter/defaultConverters/jsonConverter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "JSONConverter", function() { return _jsonConverter__WEBPACK_IMPORTED_MODULE_1__["default"]; }); + +/* harmony import */ var _dsvArrayConverter__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dsvArrayConverter */ "./src/converter/defaultConverters/dsvArrayConverter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DSVArrayConverter", function() { return _dsvArrayConverter__WEBPACK_IMPORTED_MODULE_2__["default"]; }); + +/* harmony import */ var _autoConverter__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./autoConverter */ "./src/converter/defaultConverters/autoConverter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "AutoDataConverter", function() { return _autoConverter__WEBPACK_IMPORTED_MODULE_3__["default"]; }); + + + + + + +/***/ }), + +/***/ "./src/converter/defaultConverters/jsonConverter.js": +/*!**********************************************************!*\ + !*** ./src/converter/defaultConverters/jsonConverter.js ***! + \**********************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony import */ var _utils_flat_json__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils/flat-json */ "./src/converter/utils/flat-json.js"); +/* harmony import */ var _enums_data_format__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../../enums/data-format */ "./src/enums/data-format.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +var JSONConverter = function (_DataConverter) { + _inherits(JSONConverter, _DataConverter); + + function JSONConverter() { + _classCallCheck(this, JSONConverter); + + return _possibleConstructorReturn(this, (JSONConverter.__proto__ || Object.getPrototypeOf(JSONConverter)).call(this, _enums_data_format__WEBPACK_IMPORTED_MODULE_2__["default"].FLAT_JSON)); + } + + _createClass(JSONConverter, [{ + key: 'convert', + value: function convert(data, schema, options) { + return Object(_utils_flat_json__WEBPACK_IMPORTED_MODULE_1__["default"])(data, schema, options); + } + }]); + + return JSONConverter; +}(_model_dataConverter__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (JSONConverter); + +/***/ }), + +/***/ "./src/converter/index.js": +/*!********************************!*\ + !*** ./src/converter/index.js ***! + \********************************/ +/*! exports provided: DataConverter, converterStore */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _dataConverterStore__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./dataConverterStore */ "./src/converter/dataConverterStore.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "converterStore", function() { return _dataConverterStore__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _model_dataConverter__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
./model/dataConverter */ "./src/converter/model/dataConverter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DataConverter", function() { return _model_dataConverter__WEBPACK_IMPORTED_MODULE_1__["default"]; }); + + + + + + +/***/ }), + +/***/ "./src/converter/model/dataConverter.js": +/*!**********************************************!*\ + !*** ./src/converter/model/dataConverter.js ***! + \**********************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +/** + * Interface for all data converters + */ +var DataConverter = function () { + function DataConverter(type) { + _classCallCheck(this, DataConverter); + + this._type = type; + } + + _createClass(DataConverter, [{ + key: 'convert', + value: function convert() { + throw new Error('Convert method not implemented.'); + } + }, { + key: 'type', + get: function get() { + return this._type; + } + }]); + + return DataConverter; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (DataConverter); + +/***/ }), + +/***/ "./src/converter/utils/auto-resolver.js": +/*!**********************************************!*\ 
+ !*** ./src/converter/utils/auto-resolver.js ***! + \**********************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _flat_json__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./flat-json */ "./src/converter/utils/flat-json.js"); +/* harmony import */ var _dsv_arr__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./dsv-arr */ "./src/converter/utils/dsv-arr.js"); +/* harmony import */ var _dsv_str__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./dsv-str */ "./src/converter/utils/dsv-str.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../utils */ "./src/utils/index.js"); + + + + + +/** + * Parses the input data and detect the format automatically. + * + * @param {string|Array} data - The input data. + * @param {Object} options - An optional config specific to data format. + * @return {Array.} Returns an array of headers and column major data. + */ +function Auto(data, schema, options) { + var converters = { FlatJSON: _flat_json__WEBPACK_IMPORTED_MODULE_0__["default"], DSVStr: _dsv_str__WEBPACK_IMPORTED_MODULE_2__["default"], DSVArr: _dsv_arr__WEBPACK_IMPORTED_MODULE_1__["default"] }; + var dataFormat = Object(_utils__WEBPACK_IMPORTED_MODULE_3__["detectDataFormat"])(data); + + if (!dataFormat) { + throw new Error('Couldn\'t detect the data format'); + } + + return converters[dataFormat](data, schema, options); +} + +/* harmony default export */ __webpack_exports__["default"] = (Auto); + +/***/ }), + +/***/ "./src/converter/utils/dsv-arr.js": +/*!****************************************!*\ + !*** ./src/converter/utils/dsv-arr.js ***! + \****************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../utils */ "./src/utils/index.js"); +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + + + +/** + * Parses and converts data formatted in DSV array to a manageable internal format. + * + * @param {Array.} arr - A 2D array containing of the DSV data. + * @param {Object} options - Option to control the behaviour of the parsing. + * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not. + * @return {Array} Returns an array of headers and column major data. + * @example + * + * // Sample input data: + * const data = [ + * ["a", "b", "c"], + * [1, 2, 3], + * [4, 5, 6], + * [7, 8, 9] + * ]; + */ +function DSVArr(arr, schema, options) { + if (!Array.isArray(schema)) { + throw new Error('Schema missing or is in an unsupported format'); + } + var defaultOption = { + firstRowHeader: true + }; + var schemaFields = schema.map(function (unitSchema) { + return unitSchema.name; + }); + options = Object.assign({}, defaultOption, options); + + var columns = []; + var push = Object(_utils__WEBPACK_IMPORTED_MODULE_0__["columnMajor"])(columns); + + var headers = schemaFields; + if (options.firstRowHeader) { + // If header present then remove the first header row. + // Do in-place mutation to save space. 
+ headers = arr.splice(0, 1)[0]; + } + // create a map of the headers + var headerMap = headers.reduce(function (acc, h, i) { + return Object.assign(acc, _defineProperty({}, h, i)); + }, {}); + + arr.forEach(function (fields) { + var field = []; + schemaFields.forEach(function (schemaField) { + var headIndex = headerMap[schemaField]; + field.push(fields[headIndex]); + }); + return push.apply(undefined, field); + }); + return [schemaFields, columns]; +} + +/* harmony default export */ __webpack_exports__["default"] = (DSVArr); + +/***/ }), + +/***/ "./src/converter/utils/dsv-str.js": +/*!****************************************!*\ + !*** ./src/converter/utils/dsv-str.js ***! + \****************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var d3_dsv__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! d3-dsv */ "./node_modules/d3-dsv/src/index.js"); +/* harmony import */ var _dsv_arr__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./dsv-arr */ "./src/converter/utils/dsv-arr.js"); + + + +/** + * Parses and converts data formatted in DSV string to a manageable internal format. + * + * @todo Support to be given for https://tools.ietf.org/html/rfc4180. + * @todo Sample implementation https://github.com/knrz/CSV.js/. + * + * @param {string} str - The input DSV string. + * @param {Object} options - Option to control the behaviour of the parsing. + * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not. + * @param {string} [options.fieldSeparator=","] - The separator of two consecutive field. + * @return {Array} Returns an array of headers and column major data. 
+ * @example + * + * // Sample input data: + * const data = ` + * a,b,c + * 1,2,3 + * 4,5,6 + * 7,8,9 + * ` + */ +function DSVStr(str, schema, options) { + var defaultOption = { + firstRowHeader: true, + fieldSeparator: ',' + }; + options = Object.assign({}, defaultOption, options); + + var dsv = Object(d3_dsv__WEBPACK_IMPORTED_MODULE_0__["dsvFormat"])(options.fieldSeparator); + return Object(_dsv_arr__WEBPACK_IMPORTED_MODULE_1__["default"])(dsv.parseRows(str), schema, options); +} + +/* harmony default export */ __webpack_exports__["default"] = (DSVStr); + +/***/ }), + +/***/ "./src/converter/utils/flat-json.js": +/*!******************************************!*\ + !*** ./src/converter/utils/flat-json.js ***! + \******************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../utils */ "./src/utils/index.js"); + + +/** + * Parses and converts data formatted in JSON to a manageable internal format. + * + * @param {Array.} arr - The input data formatted in JSON. + * @return {Array.} Returns an array of headers and column major data. 
+ * @example + * + * // Sample input data: + * const data = [ + * { + * "a": 1, + * "b": 2, + * "c": 3 + * }, + * { + * "a": 4, + * "b": 5, + * "c": 6 + * }, + * { + * "a": 7, + * "b": 8, + * "c": 9 + * } + * ]; + */ +function FlatJSON(arr, schema) { + if (!Array.isArray(schema)) { + throw new Error('Schema missing or is in an unsupported format'); + } + + var header = {}; + var i = 0; + var insertionIndex = void 0; + var columns = []; + var push = Object(_utils__WEBPACK_IMPORTED_MODULE_0__["columnMajor"])(columns); + var schemaFieldsName = schema.map(function (unitSchema) { + return unitSchema.name; + }); + + arr.forEach(function (item) { + var fields = []; + schemaFieldsName.forEach(function (unitSchema) { + if (unitSchema in header) { + insertionIndex = header[unitSchema]; + } else { + header[unitSchema] = i++; + insertionIndex = i - 1; + } + fields[insertionIndex] = item[unitSchema]; + }); + push.apply(undefined, fields); + }); + + return [Object.keys(header), columns]; +} + +/* harmony default export */ __webpack_exports__["default"] = (FlatJSON); + +/***/ }), + +/***/ "./src/datamodel.js": +/*!**************************!*\ + !*** ./src/datamodel.js ***! + \**************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./helper */ "./src/helper.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./constants */ "./src/constants/index.js"); +/* harmony import */ var _operator__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./operator */ "./src/operator/index.js"); +/* harmony import */ var _operator_bucket_creator__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
./operator/bucket-creator */ "./src/operator/bucket-creator.js"); +/* harmony import */ var _relation__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./relation */ "./src/relation.js"); +/* harmony import */ var _utils_reducer_store__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./utils/reducer-store */ "./src/utils/reducer-store.js"); +/* harmony import */ var _field_creator__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./field-creator */ "./src/field-creator.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./invalid-aware-types */ "./src/invalid-aware-types.js"); +/* harmony import */ var _value__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./value */ "./src/value.js"); +/* harmony import */ var _converter__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./converter */ "./src/converter/index.js"); +/* harmony import */ var _fields__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! ./fields */ "./src/fields/index.js"); +var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); + +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; 
Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + +/* eslint-disable default-case */ + + + + + + + + + + + + + + +/** + * DataModel is an in-browser representation of tabular data. It supports + * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data + * processing opearators. + * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives + * definition of generic data processing operators which are not relational algebra complient. 
+ * + * @public + * @class + * @extends Relation + * @memberof Datamodel + */ + +var DataModel = function (_Relation) { + _inherits(DataModel, _Relation); + + /** + * Creates a new DataModel instance by providing data and schema. Data could be in the form of + * - Flat JSON + * - DSV String + * - 2D Array + * + * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a + * {@link Schema | schema} for identifying the variables present in data. + * + * @constructor + * @example + * const data = loadData('cars.csv'); + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`}, + * { name: 'Cylinders', type: 'dimension' }, + * { name: 'Displacement', type: 'measure' }, + * { name: 'Horsepower', type: 'measure' }, + * { name: 'Weight_in_lbs', type: 'measure' }, + * { name: 'Acceleration', type: 'measure' }, + * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' }, + * { name: 'Origin', type: 'dimension' } + * ]; + * const dm = new DataModel(data, schema, { name: 'Cars' }); + * table(dm); + * + * @public + * + * @param {Array. | string | Array.} data Input data in any of the mentioned formats + * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the + * variables in schema has to be same. + * @param {object} [options] Optional arguments to specify more settings regarding the creation part + * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is + * assigned to the instance. + * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string. 
+ */ + function DataModel() { + var _ref; + + _classCallCheck(this, DataModel); + + for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + var _this = _possibleConstructorReturn(this, (_ref = DataModel.__proto__ || Object.getPrototypeOf(DataModel)).call.apply(_ref, [this].concat(args))); + + _this._onPropagation = []; + return _this; + } + + /** + * Reducers are simple functions which reduces an array of numbers to a representative number of the set. + * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is + * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation. + * + * @public + * + * @return {ReducerStore} Singleton instance of {@link ReducerStore}. + */ + + + _createClass(DataModel, [{ + key: 'getData', + + + /** + * Retrieve the data attached to an instance in JSON format. + * + * @example + * // DataModel instance is already prepared and assigned to dm variable + * const data = dm.getData({ + * order: 'column', + * formatter: { + * origin: (val) => val === 'European Union' ? 'EU' : val; + * } + * }); + * console.log(data); + * + * @public + * + * @param {Object} [options] Options to control how the raw data is to be returned. + * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values + * are `'rows'` and `'columns'` + * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are + * the name of the variable needs to be formatted. The formatter function is called for each row passing the + * value of the cell for a particular row as arguments. The formatter is a function in the form of + * `function (value, rowId, schema) => { ... }` + * Know more about {@link Fomatter}. + * + * @return {Array} Returns a multidimensional array of the data with schema. 
The return format looks like + * ``` + * { + * data, + * schema + * } + * ``` + */ + value: function getData(options) { + var defOptions = { + order: 'row', + formatter: null, + withUid: false, + getAllFields: false, + sort: [] + }; + options = Object.assign({}, defOptions, options); + var fields = this.getPartialFieldspace().fields; + + var dataGenerated = _operator__WEBPACK_IMPORTED_MODULE_3__["dataBuilder"].call(this, this.getPartialFieldspace().fields, this._rowDiffset, options.getAllFields ? fields.map(function (d) { + return d.name(); + }).join() : this._colIdentifier, options.sort, { + columnWise: options.order === 'column', + addUid: !!options.withUid + }); + + if (!options.formatter) { + return dataGenerated; + } + + var _options = options, + formatter = _options.formatter; + var data = dataGenerated.data, + schema = dataGenerated.schema, + uids = dataGenerated.uids; + + var fieldNames = schema.map(function (e) { + return e.name; + }); + var fmtFieldNames = Object.keys(formatter); + var fmtFieldIdx = fmtFieldNames.reduce(function (acc, next) { + var idx = fieldNames.indexOf(next); + if (idx !== -1) { + acc.push([idx, formatter[next]]); + } + return acc; + }, []); + + if (options.order === 'column') { + fmtFieldIdx.forEach(function (elem) { + var fIdx = elem[0]; + var fmtFn = elem[1]; + + data[fIdx].forEach(function (datum, datumIdx) { + data[fIdx][datumIdx] = fmtFn.call(undefined, datum, uids[datumIdx], schema[fIdx]); + }); + }); + } else { + data.forEach(function (datum, datumIdx) { + fmtFieldIdx.forEach(function (elem) { + var fIdx = elem[0]; + var fmtFn = elem[1]; + + datum[fIdx] = fmtFn.call(undefined, datum[fIdx], uids[datumIdx], schema[fIdx]); + }); + }); + } + + return dataGenerated; + } + + /** + * Returns the unique ids in an array. + * + * @return {Array} Returns an array of ids. 
+ */ + + }, { + key: 'getUids', + value: function getUids() { + var rowDiffset = this._rowDiffset; + var ids = []; + + if (rowDiffset.length) { + var diffSets = rowDiffset.split(','); + + diffSets.forEach(function (set) { + var _set$split$map = set.split('-').map(Number), + _set$split$map2 = _slicedToArray(_set$split$map, 2), + start = _set$split$map2[0], + end = _set$split$map2[1]; + + end = end !== undefined ? end : start; + ids.push.apply(ids, _toConsumableArray(Array(end - start + 1).fill().map(function (_, idx) { + return start + idx; + }))); + }); + } + + return ids; + } + /** + * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which + * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this + * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy. + * + * DataModel by default provides definition of few {@link reducer | Reducers}. + * {@link ReducerStore | User defined reducers} can also be registered. + * + * This is the chained implementation of `groupBy`. + * `groupBy` also supports {@link link_to_compose_groupBy | composability} + * + * @example + * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } ); + * console.log(groupedDm); + * + * @public + * + * @param {Array.} fieldsArr - Array containing the name of dimensions + * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its + * not passed, or any variable is ommitted from the object, default aggregation function is used from the + * schema of the variable. + * + * @return {DataModel} Returns a new DataModel instance after performing the groupby. + */ + + }, { + key: 'groupBy', + value: function groupBy(fieldsArr) { + var reducers = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + var config = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : { saveChild: true }; + + var groupByString = '' + fieldsArr.join(); + var params = [this, fieldsArr, reducers]; + var newDataModel = _operator__WEBPACK_IMPORTED_MODULE_3__["groupBy"].apply(undefined, params); + + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["persistDerivations"])(this, newDataModel, _constants__WEBPACK_IMPORTED_MODULE_2__["DM_DERIVATIVES"].GROUPBY, { fieldsArr: fieldsArr, groupByString: groupByString, defaultReducer: _utils_reducer_store__WEBPACK_IMPORTED_MODULE_6__["default"].defaultReducer() }, reducers); + + if (config.saveChild) { + newDataModel.setParent(this); + } else { + newDataModel.setParent(null); + } + + return newDataModel; + } + + /** + * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details. + * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead + * returns a new DataModel instance containing the sorted data. + * + * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and + * the type of sorting `ASC` or `DESC`. + * + * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another + * level of sorting by `Acceleration` in `ASC` order. 
+ * + * @example + * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file + * let sortedDm = dm.sort([ + * ["Origin", "DESC"] + * ["Acceleration"] // Default value is ASC + * ]); + * + * console.log(dm.getData()); + * console.log(sortedDm.getData()); + * + * // Sort with a custom sorting function + * sortedDm = dm.sort([ + * ["Origin", "DESC"] + * ["Acceleration", (a, b) => a - b] // Custom sorting function + * ]); + * + * console.log(dm.getData()); + * console.log(sortedDm.getData()); + * + * @text + * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using + * another variable which determines the order. + * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this + * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a + * particular `Origin` value. + * + * @example + * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file + * const sortedDm = dm.sort([ + * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]] + * ]); + * + * console.log(dm.getData()); + * console.log(sortedDm.getData()); + * + * @public + * + * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed. + * @return {DataModel} Returns a new instance of DataModel with sorted data. + */ + + }, { + key: 'sort', + value: function sort(sortingDetails) { + var config = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : { saveChild: false }; + + var rawData = this.getData({ + order: 'row', + sort: sortingDetails + }); + var header = rawData.schema.map(function (field) { + return field.name; + }); + var dataInCSVArr = [header].concat(rawData.data); + + var sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' }); + + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["persistDerivations"])(this, sortedDm, _constants__WEBPACK_IMPORTED_MODULE_2__["DM_DERIVATIVES"].SORT, config, sortingDetails); + + if (config.saveChild) { + sortedDm.setParent(this); + } else { + sortedDm.setParent(null); + } + + return sortedDm; + } + + /** + * Performs the serialization operation on the current {@link DataModel} instance according to the specified data + * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format, + * and during its serialization process, it converts its internal data format to the specified data type and returns + * that data regardless what type of data is used during the {@link DataModel} initialization. + * + * @example + * // here dm is the pre-declared DataModel instance. + * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: "," }); + * console.log(csvData); // The csv formatted data. + * + * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON); + * console.log(jsonData); // The json data. + * + * @public + * + * @param {string} type - The data type name for serialization. + * @param {Object} options - The optional option object. + * @param {string} options.fieldSeparator - The field separator character for DSV data type. + * @return {Array|string} Returns the serialized data. 
+ */ + + }, { + key: 'serialize', + value: function serialize(type, options) { + type = type || this._dataFormat; + options = Object.assign({}, { fieldSeparator: ',' }, options); + + var fields = this.getFieldspace().fields; + var colData = fields.map(function (f) { + return f.formattedData(); + }); + var rowsCount = colData[0].length; + var serializedData = void 0; + var rowIdx = void 0; + var colIdx = void 0; + + if (type === _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].FLAT_JSON) { + serializedData = []; + for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) { + var row = {}; + for (colIdx = 0; colIdx < fields.length; colIdx++) { + row[fields[colIdx].name()] = colData[colIdx][rowIdx]; + } + serializedData.push(row); + } + } else if (type === _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].DSV_STR) { + serializedData = [fields.map(function (f) { + return f.name(); + }).join(options.fieldSeparator)]; + for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) { + var _row = []; + for (colIdx = 0; colIdx < fields.length; colIdx++) { + _row.push(colData[colIdx][rowIdx]); + } + serializedData.push(_row.join(options.fieldSeparator)); + } + serializedData = serializedData.join('\n'); + } else if (type === _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].DSV_ARR) { + serializedData = [fields.map(function (f) { + return f.name(); + })]; + for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) { + var _row2 = []; + for (colIdx = 0; colIdx < fields.length; colIdx++) { + _row2.push(colData[colIdx][rowIdx]); + } + serializedData.push(_row2); + } + } else { + throw new Error('Data type ' + type + ' is not supported'); + } + + return serializedData; + } + }, { + key: 'addField', + value: function addField(field) { + var fieldName = field.name(); + this._colIdentifier += ',' + fieldName; + var partialFieldspace = this._partialFieldspace; + var cachedValueObjects = partialFieldspace._cachedValueObjects; + var formattedData = field.formattedData(); + var rawData = field.partialField.data; + + if 
(!partialFieldspace.fieldsObj()[field.name()]) { + partialFieldspace.fields.push(field); + cachedValueObjects.forEach(function (obj, i) { + obj[field.name()] = new _value__WEBPACK_IMPORTED_MODULE_9__["default"](formattedData[i], rawData[i], field); + }); + } else { + var fieldIndex = partialFieldspace.fields.findIndex(function (fieldinst) { + return fieldinst.name() === fieldName; + }); + fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field); + } + + // flush out cached namespace values on addition of new fields + partialFieldspace._cachedFieldsObj = null; + partialFieldspace._cachedDimension = null; + partialFieldspace._cachedMeasure = null; + + this.__calculateFieldspace().calculateFieldsConfig(); + return this; + } + + /** + * Creates a new variable calculated from existing variables. This method expects the definition of the newly created + * variable and a function which resolves the value of the new variable from existing variables. + * + * Can create a new measure based on existing variables: + * @example + * // DataModel already prepared and assigned to dm variable; + * const newDm = dataModel.calculateVariable({ + * name: 'powerToWeight', + * type: 'measure' + * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]); + * + * + * Can create a new dimension based on existing variables: + * @example + * // DataModel already prepared and assigned to dm variable; + * const child = dataModel.calculateVariable( + * { + * name: 'Efficiency', + * type: 'dimension' + * }, ['horsepower', (hp) => { + * if (hp < 80) { return 'low'; }, + * else if (hp < 120) { return 'moderate'; } + * else { return 'high' } + * }]); + * + * @public + * + * @param {Object} schema - The schema of newly defined variable. + * @param {Array.} dependency - An array containing the dependency variable names and a resolver + * function as the last element. + * @param {Object} config - An optional config object. 
+ * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child. + * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable. + * @return {DataModel} Returns an instance of DataModel with the new field. + */ + + }, { + key: 'calculateVariable', + value: function calculateVariable(schema, dependency, config) { + var _this2 = this; + + schema = Object(_helper__WEBPACK_IMPORTED_MODULE_1__["sanitizeUnitSchema"])(schema); + config = Object.assign({}, { saveChild: true, replaceVar: false }, config); + + var fieldsConfig = this.getFieldsConfig(); + var depVars = dependency.slice(0, dependency.length - 1); + var retrieveFn = dependency[dependency.length - 1]; + + if (fieldsConfig[schema.name] && !config.replaceVar) { + throw new Error(schema.name + ' field already exists in datamodel'); + } + + var depFieldIndices = depVars.map(function (field) { + var fieldSpec = fieldsConfig[field]; + if (!fieldSpec) { + // @todo dont throw error here, use warning in production mode + throw new Error(field + ' is not a valid column name.'); + } + return fieldSpec.index; + }); + + var clone = this.clone(config.saveChild); + + var fs = clone.getFieldspace().fields; + var suppliedFields = depFieldIndices.map(function (idx) { + return fs[idx]; + }); + + var cachedStore = {}; + var cloneProvider = function cloneProvider() { + return _this2.detachedRoot(); + }; + + var computedValues = []; + Object(_operator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(clone._rowDiffset, function (i) { + var fieldsData = suppliedFields.map(function (field) { + return field.partialField.data[i]; + }); + computedValues[i] = retrieveFn.apply(undefined, _toConsumableArray(fieldsData).concat([i, cloneProvider, cachedStore])); + }); + + var _createFields = Object(_field_creator__WEBPACK_IMPORTED_MODULE_7__["createFields"])([computedValues], [schema], [schema.name]), + _createFields2 = _slicedToArray(_createFields, 1), + field = 
_createFields2[0]; + + clone.addField(field); + + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["persistDerivations"])(this, clone, _constants__WEBPACK_IMPORTED_MODULE_2__["DM_DERIVATIVES"].CAL_VAR, { config: schema, fields: depVars }, retrieveFn); + + return clone; + } + + /** + * Propagates changes across all the connected DataModel instances. + * + * @param {Array} identifiers - A list of identifiers that were interacted with. + * @param {Object} payload - The interaction specific details. + * + * @return {DataModel} DataModel instance. + */ + + }, { + key: 'propagate', + value: function propagate(identifiers) { + var config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + var addToNameSpace = arguments[2]; + var propConfig = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var isMutableAction = config.isMutableAction; + var propagationSourceId = config.sourceId; + var payload = config.payload; + var rootModel = Object(_helper__WEBPACK_IMPORTED_MODULE_1__["getRootDataModel"])(this); + var propagationNameSpace = rootModel._propagationNameSpace; + var rootGroupByModel = Object(_helper__WEBPACK_IMPORTED_MODULE_1__["getRootGroupByModel"])(this); + var rootModels = { + groupByModel: rootGroupByModel, + model: rootModel + }; + + addToNameSpace && Object(_helper__WEBPACK_IMPORTED_MODULE_1__["addToPropNamespace"])(propagationNameSpace, config, this); + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateToAllDataModels"])(identifiers, rootModels, { propagationNameSpace: propagationNameSpace, sourceId: propagationSourceId }, Object.assign({ + payload: payload + }, config)); + + if (isMutableAction) { + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateImmutableActions"])(propagationNameSpace, rootModels, { + config: config, + propConfig: propConfig + }, this); + } + + return this; + } + + /** + * Associates a callback with an event name. + * + * @param {string} eventName - The name of the event. 
+ * @param {Function} callback - The callback to invoke. + * @return {DataModel} Returns this current DataModel instance itself. + */ + + }, { + key: 'on', + value: function on(eventName, callback) { + switch (eventName) { + case _constants__WEBPACK_IMPORTED_MODULE_2__["PROPAGATION"]: + this._onPropagation.push(callback); + break; + } + return this; + } + + /** + * Unsubscribes the callbacks for the provided event name. + * + * @param {string} eventName - The name of the event to unsubscribe. + * @return {DataModel} Returns the current DataModel instance itself. + */ + + }, { + key: 'unsubscribe', + value: function unsubscribe(eventName) { + switch (eventName) { + case _constants__WEBPACK_IMPORTED_MODULE_2__["PROPAGATION"]: + this._onPropagation = []; + break; + + } + return this; + } + + /** + * This method is used to invoke the method associated with propagation. + * + * @param {Object} payload The interaction payload. + * @param {DataModel} identifiers The propagated DataModel. + * @memberof DataModel + */ + + }, { + key: 'handlePropagation', + value: function handlePropagation(propModel, payload) { + var _this3 = this; + + var propListeners = this._onPropagation; + propListeners.forEach(function (fn) { + return fn.call(_this3, propModel, payload); + }); + } + + /** + * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing + * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary + * of buckets in [inclusive, exclusive) range format. This operation does not mutate the subject measure field, + * instead, it creates a new field (variable) of type dimension and subtype binned. 
+ * + * Binning can be configured by + * - providing custom bin configuration with non-uniform buckets, + * - providing bins count, + * - providing each bin size, + * + * When custom `buckets` are provided as part of binning configuration: + * @example + * // DataModel already prepared and assigned to dm variable + * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] } + * const binnedDM = dataModel.bin('horsepower', config); + * + * @text + * When `binsCount` is defined as part of binning configuration: + * @example + * // DataModel already prepared and assigned to dm variable + * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 } + * const binDM = dataModel.bin('horsepower', config); + * + * @text + * When `binSize` is defined as part of binning configuration: + * @example + * // DataModel already prepared and assigned to dm variable + * const config = { name: 'binnedHorsepower', binSize: 20, start: 5} + * const binDM = dataModel.bin('horsepower', config); + * + * @public + * + * @param {string} measureFieldName - The name of the target measure field. + * @param {Object} config - The config object. + * @param {string} [config.name] - The name of the new field which will be created. + * @param {string} [config.buckets] - An array containing the bucket ranges. + * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given. + * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given. + * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given. + * @param {string} [config.end] - The end value of the bucket ranges. It is ignored when buckets are given. + * @return {DataModel} Returns a new {@link DataModel} instance with the new field. 
+ */ + + }, { + key: 'bin', + value: function bin(measureFieldName, config) { + var fieldsConfig = this.getFieldsConfig(); + + if (!fieldsConfig[measureFieldName]) { + throw new Error('Field ' + measureFieldName + ' doesn\'t exist'); + } + + var binFieldName = config.name || measureFieldName + '_binned'; + + if (fieldsConfig[binFieldName]) { + throw new Error('Field ' + binFieldName + ' already exists'); + } + + var measureField = this.getFieldspace().fieldsObj()[measureFieldName]; + + var _createBinnedFieldDat = Object(_operator_bucket_creator__WEBPACK_IMPORTED_MODULE_4__["createBinnedFieldData"])(measureField, this._rowDiffset, config), + binnedData = _createBinnedFieldDat.binnedData, + bins = _createBinnedFieldDat.bins; + + var binField = Object(_field_creator__WEBPACK_IMPORTED_MODULE_7__["createFields"])([binnedData], [{ + name: binFieldName, + type: _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION, + subtype: _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].BINNED, + bins: bins + }], [binFieldName])[0]; + + var clone = this.clone(config.saveChild); + clone.addField(binField); + + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["persistDerivations"])(this, clone, _constants__WEBPACK_IMPORTED_MODULE_2__["DM_DERIVATIVES"].BIN, { measureFieldName: measureFieldName, config: config, binFieldName: binFieldName }, null); + + return clone; + } + + /** + * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance, + * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as + * the current one. + * This API is useful when a completely different {@link DataModel} but with same data as the current instance is + * needed. 
+ * + * @example + * const dm = new DataModel(data, schema); + * const detachedDm = dm.detachedRoot(); + * + * // has different namespace + * console.log(dm.getPartialFieldspace().name); + * console.log(detachedDm.getPartialFieldspace().name); + * + * // has same data + * console.log(dm.getData()); + * console.log(detachedDm.getData()); + * + * @public + * + * @return {DataModel} Returns a detached {@link DataModel} instance. + */ + + }, { + key: 'detachedRoot', + value: function detachedRoot() { + var data = this.serialize(_enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].FLAT_JSON); + var schema = this.getSchema(); + + return new DataModel(data, schema); + } + + /** + * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel} + * instance based on a set of dimensions. + * + * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for + * that split + * + * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible + * combinations of the dimensional values for all the dimensions provided + * + * Additionally, it also accepts a predicate function to reduce the set of rows provided. 
A + * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on + * the predicate function + * + * @example + * // without predicate function: + * const splitDt = dt.splitByRow(['Origin']) + * console.log(splitDt)); + * // This should give three unique DataModel instances, one each having rows only for 'USA', + * // 'Europe' and 'Japan' respectively + * + * @example + * // without predicate function: + * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders']) + * console.log(splitDtMulti)); + * // This should give DataModel instances for all unique combinations of Origin and Cylinder values + * + * @example + * // with predicate function: + * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === "USA") + * console.log(splitWithPredDt); + * // This should not include the DataModel for the Origin : 'USA' + * + * + * @public + * + * @param {Array} dimensionArr - Set of dimensions based on which the split should occur + * @param {Object} config - The configuration object + * @param {string} [config.saveChild] - Configuration to save child or not + * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection. + * @return {Array} Returns the new DataModel instances after operation. 
+ */ + + }, { + key: 'splitByRow', + value: function splitByRow(dimensionArr, reducerFn, config) { + var fieldsConfig = this.getFieldsConfig(); + + dimensionArr.forEach(function (fieldName) { + if (!fieldsConfig[fieldName]) { + throw new Error('Field ' + fieldName + ' doesn\'t exist in the schema'); + } + }); + + var defConfig = { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, + saveChild: true + }; + + config = Object.assign({}, defConfig, config); + + return Object(_helper__WEBPACK_IMPORTED_MODULE_1__["splitWithSelect"])(this, dimensionArr, reducerFn, config); + } + + /** + * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel} + * instance based on a set of common and unique field names provided. + * + * Each DataModel created contains a set of fields which are common to all and a set of unique fields. + * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective + * fields + * + * @example + * // without predicate function: + * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin']) + * console.log(splitDt)); + * // This should give two unique DataModel instances, both having the field 'Origin' and + * // one each having 'Acceleration' and 'Horsepower' fields respectively + * + * @example + * // without predicate function: + * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'], + * {mode: 'inverse'}) + * console.log(splitDtInv)); + * // This should give DataModel instances in the following way: + * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders' + * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance + * // has rest of the fields except 'Horsepower' + * + * + * + * @public + * + * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance + * @param 
{Array} commonFields - Set of common fields included in all datamModel instances + * @param {Object} config - The configuration object + * @param {string} [config.saveChild] - Configuration to save child or not + * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection. + * @return {Array} Returns the new DataModel instances after operation. + */ + + }, { + key: 'splitByColumn', + value: function splitByColumn() { + var uniqueFields = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : []; + var commonFields = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : []; + var config = arguments[2]; + + var defConfig = { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, + saveChild: true + }; + var fieldConfig = this.getFieldsConfig(); + var allFields = Object.keys(fieldConfig); + var normalizedProjFieldSets = [[commonFields]]; + + config = Object.assign({}, defConfig, config); + uniqueFields = uniqueFields.length ? uniqueFields : [[]]; + + uniqueFields.forEach(function (fieldSet, i) { + normalizedProjFieldSets[i] = Object(_helper__WEBPACK_IMPORTED_MODULE_1__["getNormalizedProFields"])([].concat(_toConsumableArray(fieldSet), _toConsumableArray(commonFields)), allFields, fieldConfig); + }); + + return Object(_helper__WEBPACK_IMPORTED_MODULE_1__["splitWithProject"])(this, normalizedProjFieldSets, config, allFields); + } + }], [{ + key: 'configureInvalidAwareTypes', + + + /** + * Configure null, undefined, invalid values in the source data + * + * @public + * + * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are + * represented in DataModel. + * @param {string} [config.undefined] - Define how an undefined value will be represented. + * @param {string} [config.null] - Define how a null value will be represented. + * @param {string} [config.invalid] - Define how a non-parsable value will be represented. 
+ */ + value: function configureInvalidAwareTypes(config) { + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_8__["default"].invalidAwareVals(config); + } + }, { + key: 'Reducers', + get: function get() { + return _utils_reducer_store__WEBPACK_IMPORTED_MODULE_6__["default"]; + } + + /** + * Converters are functions that transforms data in various format tpo datamodel consumabe format. + */ + + }, { + key: 'Converters', + get: function get() { + return _converter__WEBPACK_IMPORTED_MODULE_10__["converterStore"]; + } + + /** + * Register new type of fields + */ + + }, { + key: 'FieldTypes', + get: function get() { + return _fields__WEBPACK_IMPORTED_MODULE_11__["fieldRegistry"]; + } + }]); + + return DataModel; +}(_relation__WEBPACK_IMPORTED_MODULE_5__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (DataModel); + +/***/ }), + +/***/ "./src/default-config.js": +/*!*******************************!*\ + !*** ./src/default-config.js ***! + \*******************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); + + +/* harmony default export */ __webpack_exports__["default"] = ({ + dataFormat: _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].AUTO +}); + +/***/ }), + +/***/ "./src/enums/data-format.js": +/*!**********************************!*\ + !*** ./src/enums/data-format.js ***! + \**********************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * DataFormat Enum defines the format of the input data. + * Based on the format of the data the respective adapter is loaded. 
+ * + * @readonly + * @enum {string} + */ +var DataFormat = { + FLAT_JSON: 'FlatJSON', + DSV_STR: 'DSVStr', + DSV_ARR: 'DSVArr', + AUTO: 'Auto' +}; + +/* harmony default export */ __webpack_exports__["default"] = (DataFormat); + +/***/ }), + +/***/ "./src/enums/dimension-subtype.js": +/*!****************************************!*\ + !*** ./src/enums/dimension-subtype.js ***! + \****************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * DimensionSubtype enum defines the sub types of the Dimensional Field. + * + * @readonly + * @enum {string} + */ +var DimensionSubtype = { + CATEGORICAL: 'categorical', + TEMPORAL: 'temporal', + BINNED: 'binned' +}; + +/* harmony default export */ __webpack_exports__["default"] = (DimensionSubtype); + +/***/ }), + +/***/ "./src/enums/field-type.js": +/*!*********************************!*\ + !*** ./src/enums/field-type.js ***! + \*********************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * FieldType enum defines the high level field based on which visuals are controlled. + * Measure in a high level is numeric field and Dimension in a high level is string field. + * + * @readonly + * @enum {string} + */ +var FieldType = { + MEASURE: 'measure', + DIMENSION: 'dimension' +}; + +/* harmony default export */ __webpack_exports__["default"] = (FieldType); + +/***/ }), + +/***/ "./src/enums/filtering-mode.js": +/*!*************************************!*\ + !*** ./src/enums/filtering-mode.js ***! + \*************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * Filtering mode enum defines the filering modes of DataModel. + * + * @readonly + * @enum {string} + */ +var FilteringMode = { + NORMAL: 'normal', + INVERSE: 'inverse', + ALL: 'all' +}; + +/* harmony default export */ __webpack_exports__["default"] = (FilteringMode); + +/***/ }), + +/***/ "./src/enums/group-by-functions.js": +/*!*****************************************!*\ + !*** ./src/enums/group-by-functions.js ***! + \*****************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * Group by function names + * + * @readonly + * @enum {string} + */ +var GROUP_BY_FUNCTIONS = { + SUM: 'sum', + AVG: 'avg', + MIN: 'min', + MAX: 'max', + FIRST: 'first', + LAST: 'last', + COUNT: 'count', + STD: 'std' +}; + +/* harmony default export */ __webpack_exports__["default"] = (GROUP_BY_FUNCTIONS); + +/***/ }), + +/***/ "./src/enums/index.js": +/*!****************************!*\ + !*** ./src/enums/index.js ***! + \****************************/ +/*! exports provided: DataFormat, DimensionSubtype, MeasureSubtype, FieldType, FilteringMode, GROUP_BY_FUNCTIONS */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _data_format__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./data-format */ "./src/enums/data-format.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DataFormat", function() { return _data_format__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _dimension_subtype__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
./dimension-subtype */ "./src/enums/dimension-subtype.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DimensionSubtype", function() { return _dimension_subtype__WEBPACK_IMPORTED_MODULE_1__["default"]; }); + +/* harmony import */ var _measure_subtype__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./measure-subtype */ "./src/enums/measure-subtype.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "MeasureSubtype", function() { return _measure_subtype__WEBPACK_IMPORTED_MODULE_2__["default"]; }); + +/* harmony import */ var _field_type__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./field-type */ "./src/enums/field-type.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "FieldType", function() { return _field_type__WEBPACK_IMPORTED_MODULE_3__["default"]; }); + +/* harmony import */ var _filtering_mode__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./filtering-mode */ "./src/enums/filtering-mode.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "FilteringMode", function() { return _filtering_mode__WEBPACK_IMPORTED_MODULE_4__["default"]; }); + +/* harmony import */ var _group_by_functions__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./group-by-functions */ "./src/enums/group-by-functions.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "GROUP_BY_FUNCTIONS", function() { return _group_by_functions__WEBPACK_IMPORTED_MODULE_5__["default"]; }); + +/** + * FilteringMode determines if resultant DataModel should be created from selection set or rejection set. 
+ * + * The following modes are available + * - `NORMAL`: Only entries from selection set are included in the resulatant DataModel instance + * - `INVERSE`: Only entries from rejection set are included in the resulatant DataModel instance + * - ALL: Both the entries from selection and rejection set are returned in two different DataModel instance + */ + + + + + + + + +/***/ }), + +/***/ "./src/enums/measure-subtype.js": +/*!**************************************!*\ + !*** ./src/enums/measure-subtype.js ***! + \**************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * MeasureSubtype enum defines the sub types of the Measure Field. + * + * @readonly + * @enum {string} + */ +var MeasureSubtype = { + CONTINUOUS: 'continuous' +}; + +/* harmony default export */ __webpack_exports__["default"] = (MeasureSubtype); + +/***/ }), + +/***/ "./src/export.js": +/*!***********************!*\ + !*** ./src/export.js ***! + \***********************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _datamodel__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./datamodel */ "./src/datamodel.js"); +/* harmony import */ var _operator__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./operator */ "./src/operator/index.js"); +/* harmony import */ var _stats__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./stats */ "./src/stats/index.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _converter__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
./converter */ "./src/converter/index.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./utils */ "./src/utils/index.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./constants */ "./src/constants/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./invalid-aware-types */ "./src/invalid-aware-types.js"); +/* harmony import */ var _package_json__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ../package.json */ "./package.json"); +var _package_json__WEBPACK_IMPORTED_MODULE_8___namespace = /*#__PURE__*/__webpack_require__.t(/*! ../package.json */ "./package.json", 1); +/* harmony import */ var _fields__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./fields */ "./src/fields/index.js"); + + + + + + + + + + + +var Operators = { + compose: _operator__WEBPACK_IMPORTED_MODULE_1__["compose"], + bin: _operator__WEBPACK_IMPORTED_MODULE_1__["bin"], + select: _operator__WEBPACK_IMPORTED_MODULE_1__["select"], + project: _operator__WEBPACK_IMPORTED_MODULE_1__["project"], + groupBy: _operator__WEBPACK_IMPORTED_MODULE_1__["groupby"], + calculateVariable: _operator__WEBPACK_IMPORTED_MODULE_1__["calculateVariable"], + sort: _operator__WEBPACK_IMPORTED_MODULE_1__["sort"], + crossProduct: _operator__WEBPACK_IMPORTED_MODULE_1__["crossProduct"], + difference: _operator__WEBPACK_IMPORTED_MODULE_1__["difference"], + naturalJoin: _operator__WEBPACK_IMPORTED_MODULE_1__["naturalJoin"], + leftOuterJoin: _operator__WEBPACK_IMPORTED_MODULE_1__["leftOuterJoin"], + rightOuterJoin: _operator__WEBPACK_IMPORTED_MODULE_1__["rightOuterJoin"], + fullOuterJoin: _operator__WEBPACK_IMPORTED_MODULE_1__["fullOuterJoin"], + union: _operator__WEBPACK_IMPORTED_MODULE_1__["union"], + rowDiffsetIterator: _operator__WEBPACK_IMPORTED_MODULE_1__["rowDiffsetIterator"] +}; + +var version = _package_json__WEBPACK_IMPORTED_MODULE_8__.version; 
+Object.assign(_datamodel__WEBPACK_IMPORTED_MODULE_0__["default"], { + Operators: Operators, + Stats: _stats__WEBPACK_IMPORTED_MODULE_2__, + DM_DERIVATIVES: _constants__WEBPACK_IMPORTED_MODULE_6__["DM_DERIVATIVES"], + DateTimeFormatter: _utils__WEBPACK_IMPORTED_MODULE_5__["DateTimeFormatter"], + DataFormat: _constants__WEBPACK_IMPORTED_MODULE_6__["DataFormat"], + FilteringMode: _constants__WEBPACK_IMPORTED_MODULE_6__["FilteringMode"], + InvalidAwareTypes: _invalid_aware_types__WEBPACK_IMPORTED_MODULE_7__["default"], + version: version, + DataConverter: _converter__WEBPACK_IMPORTED_MODULE_4__["DataConverter"], + FieldsUtility: _fields__WEBPACK_IMPORTED_MODULE_9__ +}, _enums__WEBPACK_IMPORTED_MODULE_3__); + +/* harmony default export */ __webpack_exports__["default"] = (_datamodel__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/***/ }), + +/***/ "./src/field-creator.js": +/*!******************************!*\ + !*** ./src/field-creator.js ***! + \******************************/ +/*! exports provided: createUnitFieldFromPartial, createFields */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createUnitFieldFromPartial", function() { return createUnitFieldFromPartial; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createFields", function() { return createFields; }); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _fields__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./fields */ "./src/fields/index.js"); + + + +/** + * Creates a field instance according to the provided data and schema. + * + * @param {Array} data - The field data array. + * @param {Object} schema - The field schema object. + * @return {Field} Returns the newly created field instance. 
+ */ +function createUnitField(data, schema) { + data = data || []; + + if (_fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].has(schema.subtype)) { + return _fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].get(schema.subtype).BUILDER.fieldName(schema.name).schema(schema).data(data).rowDiffset('0-' + (data.length - 1)).build(); + } + return _fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].get(schema.type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE ? _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS : _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].CATEGORICAL).BUILDER.fieldName(schema.name).schema(schema).data(data).rowDiffset('0-' + (data.length - 1)).build(); +} + +/** + * Creates a field instance from partialField and rowDiffset. + * + * @param {PartialField} partialField - The corresponding partial field. + * @param {string} rowDiffset - The data subset config. + * @return {Field} Returns the newly created field instance. + */ +function createUnitFieldFromPartial(partialField, rowDiffset) { + var schema = partialField.schema; + + + if (_fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].has(schema.subtype)) { + return _fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].get(schema.subtype).BUILDER.partialField(partialField).rowDiffset(rowDiffset).build(); + } + return _fields__WEBPACK_IMPORTED_MODULE_1__["fieldRegistry"].get(schema.type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE ? _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS : _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].CATEGORICAL).BUILDER.partialField(partialField).rowDiffset(rowDiffset).build(); +} + +/** + * Creates the field instances with input data and schema. + * + * @param {Array} dataColumn - The data array for fields. + * @param {Array} schema - The schema array for fields. + * @param {Array} headers - The array of header names. 
+ * @return {Array.} Returns an array of newly created field instances. + */ +function createFields(dataColumn, schema, headers) { + var headersObj = {}; + + if (!(headers && headers.length)) { + headers = schema.map(function (item) { + return item.name; + }); + } + + headers.forEach(function (header, i) { + headersObj[header] = i; + }); + + return schema.map(function (item) { + return createUnitField(dataColumn[headersObj[item.name]], item); + }); +} + +/***/ }), + +/***/ "./src/field-store.js": +/*!****************************!*\ + !*** ./src/field-store.js ***! + \****************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./utils */ "./src/utils/index.js"); + + + +var fieldStore = { + data: {}, + + createNamespace: function createNamespace(fieldArr, name) { + var dataId = name || Object(_utils__WEBPACK_IMPORTED_MODULE_1__["getUniqueId"])(); + + this.data[dataId] = { + name: dataId, + fields: fieldArr, + + fieldsObj: function fieldsObj() { + var fieldsObj = this._cachedFieldsObj; + + if (!fieldsObj) { + fieldsObj = this._cachedFieldsObj = {}; + this.fields.forEach(function (field) { + fieldsObj[field.name()] = field; + }); + } + return fieldsObj; + }, + getMeasure: function getMeasure() { + var measureFields = this._cachedMeasure; + + if (!measureFields) { + measureFields = this._cachedMeasure = {}; + this.fields.forEach(function (field) { + if (field.schema().type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE) { + measureFields[field.name()] = field; + } + }); + } + return measureFields; + }, + getDimension: function getDimension() { + var dimensionFields = this._cachedDimension; + + if 
(!this._cachedDimension) { + dimensionFields = this._cachedDimension = {}; + this.fields.forEach(function (field) { + if (field.schema().type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION) { + dimensionFields[field.name()] = field; + } + }); + } + return dimensionFields; + } + }; + return this.data[dataId]; + } +}; + +/* harmony default export */ __webpack_exports__["default"] = (fieldStore); + +/***/ }), + +/***/ "./src/fields/binned/index.js": +/*!************************************!*\ + !*** ./src/fields/binned/index.js ***! + \************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _dimension__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../dimension */ "./src/fields/dimension/index.js"); +/* harmony import */ var _parsers_binned_parser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
../parsers/binned-parser */ "./src/fields/parsers/binned-parser/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + +/** + * Represents binned field subtype. 
+ * + * @public + * @class + * @extends Dimension + */ + +var Binned = function (_Dimension) { + _inherits(Binned, _Dimension); + + function Binned() { + _classCallCheck(this, Binned); + + return _possibleConstructorReturn(this, (Binned.__proto__ || Object.getPrototypeOf(Binned)).apply(this, arguments)); + } + + _createClass(Binned, [{ + key: 'calculateDataDomain', + + /** + * Calculates the corresponding field domain. + * + * @public + * @override + * @return {Array} Returns the last and first values of bins config array. + */ + value: function calculateDataDomain() { + var binsArr = this.partialField.schema.bins; + return [binsArr[0], binsArr[binsArr.length - 1]]; + } + + /** + * Returns the bins config provided while creating the field instance. + * + * @public + * @return {Array} Returns the bins array config. + */ + + }, { + key: 'bins', + value: function bins() { + return this.partialField.schema.bins; + } + }], [{ + key: 'parser', + value: function parser() { + return new _parsers_binned_parser__WEBPACK_IMPORTED_MODULE_1__["default"](); + } + }]); + + return Binned; +}(_dimension__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Binned); + +/***/ }), + +/***/ "./src/fields/categorical/index.js": +/*!*****************************************!*\ + !*** ./src/fields/categorical/index.js ***! + \*****************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../enums */ "./src/enums/index.js"); +/* harmony import */ var _dimension__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../dimension */ "./src/fields/dimension/index.js"); +/* harmony import */ var _parsers_categorical_parser__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../parsers/categorical-parser */ "./src/fields/parsers/categorical-parser/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +/** + * Represents categorical field subtype. 
+ * + * @public + * @class + * @extends Dimension + */ + +var Categorical = function (_Dimension) { + _inherits(Categorical, _Dimension); + + function Categorical() { + _classCallCheck(this, Categorical); + + return _possibleConstructorReturn(this, (Categorical.__proto__ || Object.getPrototypeOf(Categorical)).apply(this, arguments)); + } + + _createClass(Categorical, [{ + key: 'subtype', + + /** + * Returns the subtype of the field. + * + * @public + * @override + * @return {string} Returns the subtype of the field. + */ + value: function subtype() { + return _enums__WEBPACK_IMPORTED_MODULE_1__["DimensionSubtype"].CATEGORICAL; + } + + /** + * Calculates the corresponding field domain. + * + * @public + * @override + * @return {Array} Returns the unique values. + */ + + }, { + key: 'calculateDataDomain', + value: function calculateDataDomain() { + var _this2 = this; + + var hash = new Set(); + var domain = []; + + // here don't use this.data() as the iteration will be occurred two times on same data. + Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { + var datum = _this2.partialField.data[i]; + if (!hash.has(datum)) { + hash.add(datum); + domain.push(datum); + } + }); + return domain; + } + }], [{ + key: 'parser', + value: function parser() { + return new _parsers_categorical_parser__WEBPACK_IMPORTED_MODULE_3__["default"](); + } + }]); + + return Categorical; +}(_dimension__WEBPACK_IMPORTED_MODULE_2__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Categorical); + +/***/ }), + +/***/ "./src/fields/continuous/index.js": +/*!****************************************!*\ + !*** ./src/fields/continuous/index.js ***! + \****************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../enums */ "./src/enums/index.js"); +/* harmony import */ var _measure__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../measure */ "./src/fields/measure/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../invalid-aware-types */ "./src/invalid-aware-types.js"); +/* harmony import */ var _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../parsers/continuous-parser */ "./src/fields/parsers/continuous-parser/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? 
call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + + + +/** + * Represents continuous field subtype. + * + * @public + * @class + * @extends Measure + */ + +var Continuous = function (_Measure) { + _inherits(Continuous, _Measure); + + function Continuous() { + _classCallCheck(this, Continuous); + + return _possibleConstructorReturn(this, (Continuous.__proto__ || Object.getPrototypeOf(Continuous)).apply(this, arguments)); + } + + _createClass(Continuous, [{ + key: 'subtype', + + /** + * Returns the subtype of the field. + * + * @public + * @override + * @return {string} Returns the subtype of the field. + */ + value: function subtype() { + return _enums__WEBPACK_IMPORTED_MODULE_1__["MeasureSubtype"].CONTINUOUS; + } + + /** + * Calculates the corresponding field domain. + * + * @public + * @override + * @return {Array} Returns the min and max values. + */ + + }, { + key: 'calculateDataDomain', + value: function calculateDataDomain() { + var _this2 = this; + + var min = Number.POSITIVE_INFINITY; + var max = Number.NEGATIVE_INFINITY; + + // here don't use this.data() as the iteration will be occurred two times on same data. 
+ Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { + var datum = _this2.partialField.data[i]; + if (datum instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"]) { + return; + } + + if (datum < min) { + min = datum; + } + if (datum > max) { + max = datum; + } + }); + + return [min, max]; + } + }], [{ + key: 'parser', + value: function parser() { + return new _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_4__["default"](); + } + }]); + + return Continuous; +}(_measure__WEBPACK_IMPORTED_MODULE_2__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Continuous); + +/***/ }), + +/***/ "./src/fields/dimension/index.js": +/*!***************************************!*\ + !*** ./src/fields/dimension/index.js ***! + \***************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _field__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../field */ "./src/fields/field/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + +/** + * Represents dimension field type. + * + * @public + * @class + * @extends Field + */ + +var Dimension = function (_Field) { + _inherits(Dimension, _Field); + + function Dimension() { + _classCallCheck(this, Dimension); + + return _possibleConstructorReturn(this, (Dimension.__proto__ || Object.getPrototypeOf(Dimension)).apply(this, arguments)); + } + + _createClass(Dimension, [{ + key: 'domain', + + /** + * Returns the domain for the dimension field. 
+ * + * @override + * @public + * @return {any} Returns the calculated domain. + */ + value: function domain() { + if (!this._cachedDomain) { + this._cachedDomain = this.calculateDataDomain(); + } + return this._cachedDomain; + } + + /** + * Calculates the corresponding field domain. + * + * @public + * @abstract + */ + + }, { + key: 'calculateDataDomain', + value: function calculateDataDomain() { + throw new Error('Not yet implemented'); + } + + /** + * Returns the formatted version of the underlying field data. + * + * @public + * @override + * @return {Array} Returns the formatted data. + */ + + }, { + key: 'formattedData', + value: function formattedData() { + return this.data(); + } + }]); + + return Dimension; +}(_field__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Dimension); + +/***/ }), + +/***/ "./src/fields/field-registry.js": +/*!**************************************!*\ + !*** ./src/fields/field-registry.js ***! + \**************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _categorical__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./categorical */ "./src/fields/categorical/index.js"); +/* harmony import */ var _temporal__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./temporal */ "./src/fields/temporal/index.js"); +/* harmony import */ var _binned__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./binned */ "./src/fields/binned/index.js"); +/* harmony import */ var _continuous__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./continuous */ "./src/fields/continuous/index.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
../enums */ "./src/enums/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + + + + +var FieldTypeRegistry = function () { + function FieldTypeRegistry() { + _classCallCheck(this, FieldTypeRegistry); + + this._fieldType = new Map(); + } + + _createClass(FieldTypeRegistry, [{ + key: 'registerFieldType', + value: function registerFieldType(subtype, dimension) { + this._fieldType.set(subtype, dimension); + return this; + } + }, { + key: 'has', + value: function has(type) { + return this._fieldType.has(type); + } + }, { + key: 'get', + value: function get(type) { + return this._fieldType.get(type); + } + }]); + + return FieldTypeRegistry; +}(); + +var registerDefaultFields = function registerDefaultFields(store) { + store.registerFieldType(_enums__WEBPACK_IMPORTED_MODULE_4__["DimensionSubtype"].CATEGORICAL, _categorical__WEBPACK_IMPORTED_MODULE_0__["default"]).registerFieldType(_enums__WEBPACK_IMPORTED_MODULE_4__["DimensionSubtype"].TEMPORAL, _temporal__WEBPACK_IMPORTED_MODULE_1__["default"]).registerFieldType(_enums__WEBPACK_IMPORTED_MODULE_4__["DimensionSubtype"].BINNED, _binned__WEBPACK_IMPORTED_MODULE_2__["default"]).registerFieldType(_enums__WEBPACK_IMPORTED_MODULE_4__["MeasureSubtype"].CONTINUOUS, _continuous__WEBPACK_IMPORTED_MODULE_3__["default"]); +}; + +var fieldRegistry = function 
() { + var store = null; + function getStore() { + store = new FieldTypeRegistry(); + registerDefaultFields(store); + return store; + } + return store || getStore(); +}(); + +/* harmony default export */ __webpack_exports__["default"] = (fieldRegistry); + +/***/ }), + +/***/ "./src/fields/field/index.js": +/*!***********************************!*\ + !*** ./src/fields/field/index.js ***! + \***********************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _partial_field__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../partial-field */ "./src/fields/partial-field/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + +/** + * In {@link DataModel}, every tabular data consists of column, a column is stored as field. + * Field contains all the data for a given column in an array. + * + * Each record consists of several fields; the fields of all records form the columns. + * Examples of fields: name, gender, sex etc. 
+ * + * In DataModel, each field can have multiple attributes which describes its data and behaviour. + * A field can have two types of data: Measure and Dimension. + * + * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that + * quantify the data set. + * In short a dimension is the lens through which you are looking at your measure data. + * + * Refer to {@link Schema} to get info about possible field attributes. + * + * @public + * @class + */ + +var Field = function () { + /** + * Initialize a new instance. + * + * @public + * @param {PartialField} partialField - The partialField instance which holds the whole data. + * @param {string} rowDiffset - The data subset definition. + */ + function Field(partialField, rowDiffset) { + _classCallCheck(this, Field); + + this.partialField = partialField; + this.rowDiffset = rowDiffset; + } + + _createClass(Field, [{ + key: 'domain', + + + /** + * Generates the field type specific domain. + * + * @public + * @abstract + */ + value: function domain() { + throw new Error('Not yet implemented'); + } + + /** + * Returns the the field schema. + * + * @public + * @return {string} Returns the field schema. + */ + + }, { + key: 'schema', + value: function schema() { + return this.partialField.schema; + } + + /** + * Returns the name of the field. + * + * @public + * @return {string} Returns the name of the field. + */ + + }, { + key: 'name', + value: function name() { + return this.partialField.name; + } + + /** + * Returns the type of the field. + * + * @public + * @return {string} Returns the type of the field. + */ + + }, { + key: 'type', + value: function type() { + return this.partialField.schema.type; + } + + /** + * Returns the subtype of the field. + * + * @public + * @return {string} Returns the subtype of the field. 
+ */ + + }, { + key: 'subtype', + value: function subtype() { + return this.partialField.schema.subtype; + } + + /** + * Returns the description of the field. + * + * @public + * @return {string} Returns the description of the field. + */ + + }, { + key: 'description', + value: function description() { + return this.partialField.schema.description; + } + + /** + * Returns the display name of the field. + * + * @public + * @return {string} Returns the display name of the field. + */ + + }, { + key: 'displayName', + value: function displayName() { + return this.partialField.schema.displayName || this.partialField.schema.name; + } + + /** + * Returns the data associated with the field. + * + * @public + * @return {Array} Returns the data. + */ + + }, { + key: 'data', + value: function data() { + var _this = this; + + var data = []; + Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { + data.push(_this.partialField.data[i]); + }); + return data; + } + + /** + * Returns the formatted version of the underlying field data. 
+ * + * @public + * @abstract + */ + + }, { + key: 'formattedData', + value: function formattedData() { + throw new Error('Not yet implemented'); + } + }], [{ + key: 'parser', + value: function parser() { + throw new Error('Not yet implemented'); + } + }, { + key: 'BUILDER', + get: function get() { + var builder = { + _params: {}, + _context: this, + fieldName: function fieldName(name) { + this._params.name = name; + return this; + }, + schema: function schema(_schema) { + this._params.schema = _schema; + return this; + }, + data: function data(_data) { + this._params.data = _data; + return this; + }, + partialField: function partialField(_partialField) { + this._params.partialField = _partialField; + return this; + }, + rowDiffset: function rowDiffset(_rowDiffset) { + this._params.rowDiffset = _rowDiffset; + return this; + }, + build: function build() { + var partialField = null; + if (this._params.partialField instanceof _partial_field__WEBPACK_IMPORTED_MODULE_1__["default"]) { + partialField = this._params.partialField; + } else if (this._params.schema && this._params.data) { + partialField = new _partial_field__WEBPACK_IMPORTED_MODULE_1__["default"](this._params.name, this._params.data, this._params.schema, this._context.parser()); + } else { + throw new Error('Invalid Field parameters'); + } + return new this._context(partialField, this._params.rowDiffset); + } + }; + return builder; + } + }]); + + return Field; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (Field); + +/***/ }), + +/***/ "./src/fields/index.js": +/*!*****************************!*\ + !*** ./src/fields/index.js ***! + \*****************************/ +/*! exports provided: Dimension, Measure, FieldParser, fieldRegistry, columnMajor */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _dimension__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./dimension */ "./src/fields/dimension/index.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "Dimension", function() { return _dimension__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _measure__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./measure */ "./src/fields/measure/index.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "Measure", function() { return _measure__WEBPACK_IMPORTED_MODULE_1__["default"]; }); + +/* harmony import */ var _parsers_field_parser__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./parsers/field-parser */ "./src/fields/parsers/field-parser/index.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "FieldParser", function() { return _parsers_field_parser__WEBPACK_IMPORTED_MODULE_2__["default"]; }); + +/* harmony import */ var _field_registry__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./field-registry */ "./src/fields/field-registry.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "fieldRegistry", function() { return _field_registry__WEBPACK_IMPORTED_MODULE_3__["default"]; }); + +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../utils */ "./src/utils/index.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "columnMajor", function() { return _utils__WEBPACK_IMPORTED_MODULE_4__["columnMajor"]; }); + + + + + + + +/***/ }), + +/***/ "./src/fields/measure/index.js": +/*!*************************************!*\ + !*** ./src/fields/measure/index.js ***! + \*************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../../utils */ "./src/utils/index.js"); +/* harmony import */ var _operator_group_by_function__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../operator/group-by-function */ "./src/operator/group-by-function.js"); +/* harmony import */ var _field__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../field */ "./src/fields/field/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +/** + * Represents measure field type. 
+ * + * @public + * @class + * @extends Field + */ + +var Measure = function (_Field) { + _inherits(Measure, _Field); + + function Measure() { + _classCallCheck(this, Measure); + + return _possibleConstructorReturn(this, (Measure.__proto__ || Object.getPrototypeOf(Measure)).apply(this, arguments)); + } + + _createClass(Measure, [{ + key: 'domain', + + /** + * Returns the domain for the measure field. + * + * @override + * @public + * @return {any} Returns the calculated domain. + */ + value: function domain() { + if (!this._cachedDomain) { + this._cachedDomain = this.calculateDataDomain(); + } + return this._cachedDomain; + } + + /** + * Returns the unit of the measure field. + * + * @public + * @return {string} Returns unit of the field. + */ + + }, { + key: 'unit', + value: function unit() { + return this.partialField.schema.unit; + } + + /** + * Returns the aggregation function name of the measure field. + * + * @public + * @return {string} Returns aggregation function name of the field. + */ + + }, { + key: 'defAggFn', + value: function defAggFn() { + return this.partialField.schema.defAggFn || _operator_group_by_function__WEBPACK_IMPORTED_MODULE_1__["defaultReducerName"]; + } + + /** + * Returns the number format of the measure field. + * + * @public + * @return {Function} Returns number format of the field. + */ + + }, { + key: 'numberFormat', + value: function numberFormat() { + var numberFormat = this.partialField.schema.numberFormat; + + return numberFormat instanceof Function ? numberFormat : _utils__WEBPACK_IMPORTED_MODULE_0__["formatNumber"]; + } + + /** + * Calculates the corresponding field domain. + * + * @public + * @abstract + */ + + }, { + key: 'calculateDataDomain', + value: function calculateDataDomain() { + throw new Error('Not yet implemented'); + } + + /** + * Returns the formatted version of the underlying field data. + * + * @public + * @override + * @return {Array} Returns the formatted data. 
+ */ + + }, { + key: 'formattedData', + value: function formattedData() { + return this.data(); + } + }]); + + return Measure; +}(_field__WEBPACK_IMPORTED_MODULE_2__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Measure); + +/***/ }), + +/***/ "./src/fields/parsers/binned-parser/index.js": +/*!***************************************************!*\ + !*** ./src/fields/parsers/binned-parser/index.js ***! + \***************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _field_parser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../field-parser */ "./src/fields/parsers/field-parser/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../../invalid-aware-types */ "./src/invalid-aware-types.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? 
call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + +/** + * A FieldParser which parses the binned values. + * + * @public + * @class + * @implements {FieldParser} + */ + +var BinnedParser = function (_FieldParser) { + _inherits(BinnedParser, _FieldParser); + + function BinnedParser() { + _classCallCheck(this, BinnedParser); + + return _possibleConstructorReturn(this, (BinnedParser.__proto__ || Object.getPrototypeOf(BinnedParser)).apply(this, arguments)); + } + + _createClass(BinnedParser, [{ + key: 'parse', + + /** + * Parses a single binned value of a field and returns the sanitized value. + * + * @public + * @param {string} val - The value of the field. + * @return {string} Returns the sanitized value. + */ + value: function parse(val) { + var regex = /^\s*([+-]?\d+(?:\.\d+)?)\s*-\s*([+-]?\d+(?:\.\d+)?)\s*$/; + val = String(val); + var result = void 0; + // check if invalid date value + if (!_invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].isInvalid(val)) { + var matched = val.match(regex); + result = matched ? 
Number.parseFloat(matched[1]) + '-' + Number.parseFloat(matched[2]) : _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NA; + } else { + result = _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].getInvalidType(val); + } + return result; + } + }]); + + return BinnedParser; +}(_field_parser__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (BinnedParser); + +/***/ }), + +/***/ "./src/fields/parsers/categorical-parser/index.js": +/*!********************************************************!*\ + !*** ./src/fields/parsers/categorical-parser/index.js ***! + \********************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _field_parser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../field-parser */ "./src/fields/parsers/field-parser/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
../../../invalid-aware-types */ "./src/invalid-aware-types.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + +/** + * A FieldParser which parses the categorical values. 
+ * + * @public + * @class + * @implements {FieldParser} + */ + +var CategoricalParser = function (_FieldParser) { + _inherits(CategoricalParser, _FieldParser); + + function CategoricalParser() { + _classCallCheck(this, CategoricalParser); + + return _possibleConstructorReturn(this, (CategoricalParser.__proto__ || Object.getPrototypeOf(CategoricalParser)).apply(this, arguments)); + } + + _createClass(CategoricalParser, [{ + key: 'parse', + + /** + * Parses a single value of a field and returns the stringified form. + * + * @public + * @param {string|number} val - The value of the field. + * @return {string} Returns the stringified value. + */ + value: function parse(val) { + var result = void 0; + // check if invalid date value + if (!_invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].isInvalid(val)) { + result = String(val).trim(); + } else { + result = _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].getInvalidType(val); + } + return result; + } + }]); + + return CategoricalParser; +}(_field_parser__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (CategoricalParser); + +/***/ }), + +/***/ "./src/fields/parsers/continuous-parser/index.js": +/*!*******************************************************!*\ + !*** ./src/fields/parsers/continuous-parser/index.js ***! + \*******************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _field_parser__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../field-parser */ "./src/fields/parsers/field-parser/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
../../../invalid-aware-types */ "./src/invalid-aware-types.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + +/** + * A FieldParser which parses the continuous values. 
+ * + * @public + * @class + * @implements {FieldParser} + */ + +var ContinuousParser = function (_FieldParser) { + _inherits(ContinuousParser, _FieldParser); + + function ContinuousParser() { + _classCallCheck(this, ContinuousParser); + + return _possibleConstructorReturn(this, (ContinuousParser.__proto__ || Object.getPrototypeOf(ContinuousParser)).apply(this, arguments)); + } + + _createClass(ContinuousParser, [{ + key: 'parse', + + /** + * Parses a single value of a field and returns the number form. + * + * @public + * @param {string|number} val - The value of the field. + * @return {string} Returns the number value. + */ + value: function parse(val) { + var result = void 0; + // check if invalid date value + if (!_invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].isInvalid(val)) { + var parsedVal = parseFloat(val, 10); + result = Number.isNaN(parsedVal) ? _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NA : parsedVal; + } else { + result = _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].getInvalidType(val); + } + return result; + } + }]); + + return ContinuousParser; +}(_field_parser__WEBPACK_IMPORTED_MODULE_0__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (ContinuousParser); + +/***/ }), + +/***/ "./src/fields/parsers/field-parser/index.js": +/*!**************************************************!*\ + !*** ./src/fields/parsers/field-parser/index.js ***! + \**************************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +/** + * A interface to represent a parser which is responsible to parse the field. + * + * @public + * @interface + */ +var FieldParser = function () { + function FieldParser() { + _classCallCheck(this, FieldParser); + } + + _createClass(FieldParser, [{ + key: 'parse', + + /** + * Parses a single value of a field and return the sanitized form. + * + * @public + * @abstract + */ + value: function parse() { + throw new Error('Not yet implemented'); + } + }]); + + return FieldParser; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (FieldParser); + +/***/ }), + +/***/ "./src/fields/parsers/temporal-parser/index.js": +/*!*****************************************************!*\ + !*** ./src/fields/parsers/temporal-parser/index.js ***! + \*****************************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../../../utils */ "./src/utils/index.js"); +/* harmony import */ var _field_parser__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../field-parser */ "./src/fields/parsers/field-parser/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../invalid-aware-types */ "./src/invalid-aware-types.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + +/** + * A FieldParser which parses the temporal values. 
+ * + * @public + * @class + * @implements {FieldParser} + */ + +var TemporalParser = function (_FieldParser) { + _inherits(TemporalParser, _FieldParser); + + function TemporalParser() { + _classCallCheck(this, TemporalParser); + + return _possibleConstructorReturn(this, (TemporalParser.__proto__ || Object.getPrototypeOf(TemporalParser)).apply(this, arguments)); + } + + _createClass(TemporalParser, [{ + key: 'parse', + + + /** + * Parses a single value of a field and returns the millisecond value. + * + * @public + * @param {string|number} val - The value of the field. + * @return {number} Returns the millisecond value. + */ + value: function parse(val, _ref) { + var format = _ref.format; + + var result = void 0; + // check if invalid date value + if (!this._dtf) { + this._dtf = new _utils__WEBPACK_IMPORTED_MODULE_0__["DateTimeFormatter"](format); + } + if (!_invalid_aware_types__WEBPACK_IMPORTED_MODULE_2__["default"].isInvalid(val)) { + var nativeDate = this._dtf.getNativeDate(val); + result = nativeDate ? nativeDate.getTime() : _invalid_aware_types__WEBPACK_IMPORTED_MODULE_2__["default"].NA; + } else { + result = _invalid_aware_types__WEBPACK_IMPORTED_MODULE_2__["default"].getInvalidType(val); + } + return result; + } + }]); + + return TemporalParser; +}(_field_parser__WEBPACK_IMPORTED_MODULE_1__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (TemporalParser); + +/***/ }), + +/***/ "./src/fields/partial-field/index.js": +/*!*******************************************!*\ + !*** ./src/fields/partial-field/index.js ***! + \*******************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +/** + * Stores the full data and the metadata of a field. It provides + * a single source of data from which the future Field + * instance can get a subset of it with a rowDiffset config. + * + * @class + * @public + */ +var PartialField = function () { + /** + * Initialize a new instance. + * + * @public + * @param {string} name - The name of the field. + * @param {Array} data - The data array. + * @param {Object} schema - The schema object of the corresponding field. + * @param {FieldParser} parser - The parser instance corresponding to that field. + */ + function PartialField(name, data, schema, parser) { + _classCallCheck(this, PartialField); + + this.name = name; + this.schema = schema; + this.parser = parser; + this.data = this._sanitize(data); + } + + /** + * Sanitizes the field data. + * + * @private + * @param {Array} data - The actual input data. + * @return {Array} Returns the sanitized data. 
+ */ + + + _createClass(PartialField, [{ + key: "_sanitize", + value: function _sanitize(data) { + var _this = this; + + return data.map(function (datum) { + return _this.parser.parse(datum, { format: _this.schema.format }); + }); + } + }]); + + return PartialField; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (PartialField); + +/***/ }), + +/***/ "./src/fields/temporal/index.js": +/*!**************************************!*\ + !*** ./src/fields/temporal/index.js ***! + \**************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _dimension__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../dimension */ "./src/fields/dimension/index.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../utils */ "./src/utils/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../invalid-aware-types */ "./src/invalid-aware-types.js"); +/* harmony import */ var _parsers_temporal_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
../parsers/temporal-parser */ "./src/fields/parsers/temporal-parser/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } + +function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } + + + + + + + +/** + * Represents temporal field subtype. + * + * @public + * @class + * @extends Dimension + */ + +var Temporal = function (_Dimension) { + _inherits(Temporal, _Dimension); + + /** + * Initialize a new instance. + * + * @public + * @param {PartialField} partialField - The partialField instance which holds the whole data. + * @param {string} rowDiffset - The data subset definition. 
+ */ + function Temporal(partialField, rowDiffset) { + _classCallCheck(this, Temporal); + + var _this = _possibleConstructorReturn(this, (Temporal.__proto__ || Object.getPrototypeOf(Temporal)).call(this, partialField, rowDiffset)); + + _this._cachedMinDiff = null; + return _this; + } + + /** + * Calculates the corresponding field domain. + * + * @public + * @override + * @return {Array} Returns the unique values. + */ + + + _createClass(Temporal, [{ + key: 'calculateDataDomain', + value: function calculateDataDomain() { + var _this2 = this; + + var hash = new Set(); + var domain = []; + + // here don't use this.data() as the iteration will be + // occurred two times on same data. + Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { + var datum = _this2.partialField.data[i]; + if (!hash.has(datum)) { + hash.add(datum); + domain.push(datum); + } + }); + + return domain; + } + + /** + * Calculates the minimum consecutive difference from the associated field data. + * + * @public + * @return {number} Returns the minimum consecutive diff in milliseconds. 
+ */ + + }, { + key: 'minimumConsecutiveDifference', + value: function minimumConsecutiveDifference() { + if (this._cachedMinDiff) { + return this._cachedMinDiff; + } + + var sortedData = this.data().filter(function (item) { + return !(item instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"]); + }).sort(function (a, b) { + return a - b; + }); + var arrLn = sortedData.length; + var minDiff = Number.POSITIVE_INFINITY; + var prevDatum = void 0; + var nextDatum = void 0; + var processedCount = 0; + + for (var i = 1; i < arrLn; i++) { + prevDatum = sortedData[i - 1]; + nextDatum = sortedData[i]; + + if (nextDatum === prevDatum) { + continue; + } + + minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]); + processedCount++; + } + + if (!processedCount) { + minDiff = null; + } + this._cachedMinDiff = minDiff; + + return this._cachedMinDiff; + } + + /** + * Returns the format specified in the input schema while creating field. + * + * @public + * @return {string} Returns the datetime format. + */ + + }, { + key: 'format', + value: function format() { + return this.partialField.schema.format; + } + + /** + * Returns the formatted version of the underlying field data + * If data is of type invalid or has missing format use the raw value + * @public + * @override + * @return {Array} Returns the formatted data. 
+ */ + + }, { + key: 'formattedData', + value: function formattedData() { + var _this3 = this; + + var data = []; + var dataFormat = this.format(); + + Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { + var datum = _this3.partialField.data[i]; + // If value is of invalid type or format is missing + if (_invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"].isInvalid(datum) || !dataFormat && Number.isFinite(datum)) { + // Use the invalid map value or the raw value + var parsedDatum = _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"].getInvalidType(datum) || datum; + data.push(parsedDatum); + } else { + data.push(_utils__WEBPACK_IMPORTED_MODULE_2__["DateTimeFormatter"].formatAs(datum, dataFormat)); + } + }); + return data; + } + }], [{ + key: 'parser', + value: function parser() { + return new _parsers_temporal_parser__WEBPACK_IMPORTED_MODULE_4__["default"](); + } + }]); + + return Temporal; +}(_dimension__WEBPACK_IMPORTED_MODULE_1__["default"]); + +/* harmony default export */ __webpack_exports__["default"] = (Temporal); + +/***/ }), + +/***/ "./src/helper.js": +/*!***********************!*\ + !*** ./src/helper.js ***! + \***********************/ +/*! 
exports provided: prepareJoinData, updateFields, persistCurrentDerivation, persistAncestorDerivation, persistDerivations, selectRowDiffsetIterator, rowSplitDiffsetIterator, selectHelper, cloneWithAllFields, filterPropagationModel, splitWithSelect, addDiffsetToClonedDm, cloneWithSelect, cloneWithProject, splitWithProject, sanitizeUnitSchema, validateUnitSchema, sanitizeAndValidateSchema, resolveFieldName, updateData, fieldInSchema, getDerivationArguments, getRootGroupByModel, getRootDataModel, getPathToRootModel, propagateToAllDataModels, propagateImmutableActions, addToPropNamespace, getNormalizedProFields, getNumberFormattedVal */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "prepareJoinData", function() { return prepareJoinData; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updateFields", function() { return updateFields; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "persistCurrentDerivation", function() { return persistCurrentDerivation; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "persistAncestorDerivation", function() { return persistAncestorDerivation; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "persistDerivations", function() { return persistDerivations; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "selectRowDiffsetIterator", function() { return selectRowDiffsetIterator; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "rowSplitDiffsetIterator", function() { return rowSplitDiffsetIterator; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "selectHelper", function() { return selectHelper; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, 
"cloneWithAllFields", function() { return cloneWithAllFields; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "filterPropagationModel", function() { return filterPropagationModel; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "splitWithSelect", function() { return splitWithSelect; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addDiffsetToClonedDm", function() { return addDiffsetToClonedDm; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "cloneWithSelect", function() { return cloneWithSelect; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "cloneWithProject", function() { return cloneWithProject; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "splitWithProject", function() { return splitWithProject; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sanitizeUnitSchema", function() { return sanitizeUnitSchema; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "validateUnitSchema", function() { return validateUnitSchema; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sanitizeAndValidateSchema", function() { return sanitizeAndValidateSchema; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "resolveFieldName", function() { return resolveFieldName; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "updateData", function() { return updateData; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fieldInSchema", function() { return fieldInSchema; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getDerivationArguments", function() { return getDerivationArguments; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getRootGroupByModel", function() { 
return getRootGroupByModel; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getRootDataModel", function() { return getRootDataModel; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getPathToRootModel", function() { return getPathToRootModel; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "propagateToAllDataModels", function() { return propagateToAllDataModels; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "propagateImmutableActions", function() { return propagateImmutableActions; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "addToPropNamespace", function() { return addToPropNamespace; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getNormalizedProFields", function() { return getNormalizedProFields; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getNumberFormattedVal", function() { return getNumberFormattedVal; }); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _field_store__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./field-store */ "./src/field-store.js"); +/* harmony import */ var _value__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./value */ "./src/value.js"); +/* harmony import */ var _operator__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./operator */ "./src/operator/index.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./constants */ "./src/constants/index.js"); +/* harmony import */ var _field_creator__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./field-creator */ "./src/field-creator.js"); +/* harmony import */ var _default_config__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! 
./default-config */ "./src/default-config.js"); +/* harmony import */ var _converter__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! ./converter */ "./src/converter/index.js"); +/* harmony import */ var _fields__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./fields */ "./src/fields/index.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./utils */ "./src/utils/index.js"); +var _selectModeMap, + _this = undefined; + +var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + + + + + + + + + + +/** + * Prepares the selection data. 
+ */ +function prepareSelectionData(fields, formattedData, rawData, i) { + var resp = {}; + + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = fields.entries()[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var _ref = _step.value; + + var _ref2 = _slicedToArray(_ref, 2); + + var key = _ref2[0]; + var field = _ref2[1]; + + resp[field.name()] = new _value__WEBPACK_IMPORTED_MODULE_2__["default"](formattedData[key][i], rawData[key][i], field); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + return resp; +} + +function prepareJoinData(fields) { + var resp = {}; + + for (var key in fields) { + resp[key] = new _value__WEBPACK_IMPORTED_MODULE_2__["default"](fields[key].formattedValue, fields[key].rawValue, key); + } + return resp; +} + +var updateFields = function updateFields(_ref3, partialFieldspace, fieldStoreName) { + var _ref4 = _slicedToArray(_ref3, 2), + rowDiffset = _ref4[0], + colIdentifier = _ref4[1]; + + var collID = colIdentifier.length ? colIdentifier.split(',') : []; + var partialFieldMap = partialFieldspace.fieldsObj(); + var newFields = collID.map(function (coll) { + return Object(_field_creator__WEBPACK_IMPORTED_MODULE_5__["createUnitFieldFromPartial"])(partialFieldMap[coll].partialField, rowDiffset); + }); + return _field_store__WEBPACK_IMPORTED_MODULE_1__["default"].createNamespace(newFields, fieldStoreName); +}; + +var persistCurrentDerivation = function persistCurrentDerivation(model, operation) { + var config = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : {}; + var criteriaFn = arguments[3]; + + if (operation === _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].COMPOSE) { + var _model$_derivation; + + model._derivation.length = 0; + (_model$_derivation = model._derivation).push.apply(_model$_derivation, _toConsumableArray(criteriaFn)); + } else { + model._derivation.push({ + op: operation, + meta: config, + criteria: criteriaFn + }); + } +}; +var persistAncestorDerivation = function persistAncestorDerivation(sourceDm, newDm) { + var _newDm$_ancestorDeriv; + + (_newDm$_ancestorDeriv = newDm._ancestorDerivation).push.apply(_newDm$_ancestorDeriv, _toConsumableArray(sourceDm._ancestorDerivation).concat(_toConsumableArray(sourceDm._derivation))); +}; + +var persistDerivations = function persistDerivations(sourceDm, model, operation) { + var config = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + var criteriaFn = arguments[4]; + + persistCurrentDerivation(model, operation, config, criteriaFn); + persistAncestorDerivation(sourceDm, model); +}; + +var selectModeMap = (_selectModeMap = {}, _defineProperty(_selectModeMap, _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, { + diffIndex: ['rowDiffset'], + calcDiff: [true, false] +}), _defineProperty(_selectModeMap, _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].INVERSE, { + diffIndex: ['rejectRowDiffset'], + calcDiff: [false, true] +}), _defineProperty(_selectModeMap, _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].ALL, { + diffIndex: ['rowDiffset', 'rejectRowDiffset'], + calcDiff: [true, true] +}), _selectModeMap); + +var generateRowDiffset = function generateRowDiffset(rowDiffset, i, lastInsertedValue) { + if (lastInsertedValue !== -1 && i === lastInsertedValue + 1) { + var li = rowDiffset.length - 1; + + rowDiffset[li] = rowDiffset[li].split('-')[0] + '-' + i; + } else { + rowDiffset.push('' + i); + } +}; + +var selectRowDiffsetIterator = function selectRowDiffsetIterator(rowDiffset, checker, mode) { + 
var lastInsertedValueSel = -1; + var lastInsertedValueRej = -1; + var newRowDiffSet = []; + var rejRowDiffSet = []; + + var _selectModeMap$mode$c = _slicedToArray(selectModeMap[mode].calcDiff, 2), + shouldSelect = _selectModeMap$mode$c[0], + shouldReject = _selectModeMap$mode$c[1]; + + Object(_operator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(rowDiffset, function (i) { + var checkerResult = checker(i); + checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, lastInsertedValueSel); + !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej); + }); + return { + rowDiffset: newRowDiffSet.join(','), + rejectRowDiffset: rejRowDiffSet.join(',') + }; +}; + +var rowSplitDiffsetIterator = function rowSplitDiffsetIterator(rowDiffset, checker, mode, dimensionArr, fieldStoreObj) { + var lastInsertedValue = {}; + var splitRowDiffset = {}; + var dimensionMap = {}; + + Object(_operator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(rowDiffset, function (i) { + if (checker(i)) { + var hash = ''; + + var dimensionSet = { keys: {} }; + + dimensionArr.forEach(function (_) { + var data = fieldStoreObj[_].partialField.data[i]; + hash = hash + '-' + data; + dimensionSet.keys[_] = data; + }); + + if (splitRowDiffset[hash] === undefined) { + splitRowDiffset[hash] = []; + lastInsertedValue[hash] = -1; + dimensionMap[hash] = dimensionSet; + } + + generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]); + lastInsertedValue[hash] = i; + } + }); + + return { + splitRowDiffset: splitRowDiffset, + dimensionMap: dimensionMap + }; +}; + +var selectHelper = function selectHelper(clonedDm, selectFn, config, sourceDm, iterator) { + var cachedStore = {}; + var cloneProvider = function cloneProvider() { + return sourceDm.detachedRoot(); + }; + var mode = config.mode; + + var rowDiffset = clonedDm._rowDiffset; + var cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects; + + var selectorHelperFn = function 
selectorHelperFn(index) { + return selectFn(cachedValueObjects[index], index, cloneProvider, cachedStore); + }; + + return iterator(rowDiffset, selectorHelperFn, mode); +}; + +var cloneWithAllFields = function cloneWithAllFields(model) { + var clonedDm = model.clone(false); + var partialFieldspace = model.getPartialFieldspace(); + clonedDm._colIdentifier = partialFieldspace.fields.map(function (f) { + return f.name(); + }).join(','); + + // flush out cached namespace values on addition of new fields + partialFieldspace._cachedFieldsObj = null; + partialFieldspace._cachedDimension = null; + partialFieldspace._cachedMeasure = null; + clonedDm.__calculateFieldspace().calculateFieldsConfig(); + + return clonedDm; +}; + +var getKey = function getKey(arr, data, fn) { + var key = fn(arr, data, 0); + + for (var i = 1, len = arr.length; i < len; i++) { + key = key + ',' + fn(arr, data, i); + } + return key; +}; + +var keyFn = function keyFn(arr, fields, idx, rowId) { + var val = fields[arr[idx]].internalValue; + return arr[idx] === _constants__WEBPACK_IMPORTED_MODULE_4__["ROW_ID"] ? rowId : val; +}; + +var boundsChecker = _defineProperty({}, _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS, function (val, domain) { + var domainArr = domain[0] instanceof Array ? domain : [domain]; + return domainArr.some(function (dom) { + return val >= dom[0] && val <= dom[1]; + }); +}); + +var isWithinDomain = function isWithinDomain(value, domain, fieldType) { + return boundsChecker[fieldType](value, domain); +}; + +var filterPropagationModel = function filterPropagationModel(model, propModels) { + var config = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : {}; + + var fns = []; + var operation = config.operation || _constants__WEBPACK_IMPORTED_MODULE_4__["LOGICAL_OPERATORS"].AND; + var filterByMeasure = config.filterByMeasure || false; + var clonedModel = cloneWithAllFields(model); + var modelFieldsConfig = clonedModel.getFieldsConfig(); + + if (!propModels.length) { + fns = [function () { + return false; + }]; + } else { + fns = propModels.map(function (propModel) { + return function () { + var criteria = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + var _criteria$identifiers = criteria.identifiers, + identifiers = _criteria$identifiers === undefined ? [[], []] : _criteria$identifiers, + range = criteria.range; + + var _identifiers = _slicedToArray(identifiers, 2), + _identifiers$ = _identifiers[0], + fieldNames = _identifiers$ === undefined ? [] : _identifiers$, + _identifiers$2 = _identifiers[1], + values = _identifiers$2 === undefined ? [] : _identifiers$2; + + var dLen = fieldNames.length; + var valuesMap = {}; + + if (dLen) { + for (var i = 1, len = identifiers.length; i < len; i++) { + var row = identifiers[i]; + var key = row.join(); + valuesMap[key] = 1; + } + } + var rangeKeys = Object.keys(range || {}); + return values.length || rangeKeys.length ? function (fields, i) { + var present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; + + if (filterByMeasure) { + return rangeKeys.every(function (field) { + var val = fields[field].internalValue; + return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); + }) && present; + } + return present; + } : function () { + return false; + }; + }(propModel); + }); + } + + var filteredModel = void 0; + if (operation === _constants__WEBPACK_IMPORTED_MODULE_4__["LOGICAL_OPERATORS"].AND) { + filteredModel = clonedModel.select(function (fields) { + return fns.every(function (fn) { + return fn(fields); + }); + }, { + saveChild: false + }); + } else { + filteredModel = clonedModel.select(function (fields) { + return fns.some(function (fn) { + return fn(fields); + }); + }, { + saveChild: false + }); + } + + return filteredModel; +}; + +var splitWithSelect = function splitWithSelect(sourceDm, dimensionArr) { + var reducerFn = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : function (val) { + return val; + }; + var config = arguments[3]; + var saveChild = config.saveChild; + + var fieldStoreObj = sourceDm.getFieldspace().fieldsObj(); + + var _selectHelper = selectHelper(sourceDm.clone(saveChild), reducerFn, config, sourceDm, function () { + for (var _len = arguments.length, params = Array(_len), _key = 0; _key < _len; _key++) { + params[_key] = arguments[_key]; + } + + return rowSplitDiffsetIterator.apply(undefined, params.concat([dimensionArr, fieldStoreObj])); + }), + splitRowDiffset = _selectHelper.splitRowDiffset, + dimensionMap = _selectHelper.dimensionMap; + + var clonedDMs = []; + Object.keys(splitRowDiffset).sort().forEach(function (e) { + if (splitRowDiffset[e]) { + var cloned = sourceDm.clone(saveChild); + var derivation = dimensionMap[e]; + cloned._rowDiffset = splitRowDiffset[e].join(','); + cloned.__calculateFieldspace().calculateFieldsConfig(); + + var derivationFormula = function derivationFormula(fields) { + return dimensionArr.every(function (_) { + 
return fields[_].internalValue === derivation.keys[_]; + }); + }; + // Store reference to child model and selector function + if (saveChild) { + persistDerivations(sourceDm, cloned, _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].SELECT, config, derivationFormula); + } + cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e]; + + clonedDMs.push(cloned); + } + }); + + return clonedDMs; +}; +var addDiffsetToClonedDm = function addDiffsetToClonedDm(clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) { + clonedDm._rowDiffset = rowDiffset; + clonedDm.__calculateFieldspace().calculateFieldsConfig(); + persistDerivations(sourceDm, clonedDm, _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].SELECT, { config: selectConfig }, selectFn); +}; + +var cloneWithSelect = function cloneWithSelect(sourceDm, selectFn, selectConfig, cloneConfig) { + var extraCloneDm = {}; + + var mode = selectConfig.mode; + + + var cloned = sourceDm.clone(cloneConfig.saveChild); + var setOfRowDiffsets = selectHelper(cloned, selectFn, selectConfig, sourceDm, selectRowDiffsetIterator); + var diffIndex = selectModeMap[mode].diffIndex; + + addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn); + + if (diffIndex.length > 1) { + extraCloneDm = sourceDm.clone(cloneConfig.saveChild); + addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn); + return [cloned, extraCloneDm]; + } + + return cloned; +}; + +var cloneWithProject = function cloneWithProject(sourceDm, projField, config, allFields) { + var cloned = sourceDm.clone(config.saveChild); + var projectionSet = projField; + if (config.mode === _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].INVERSE) { + projectionSet = allFields.filter(function (fieldName) { + return projField.indexOf(fieldName) === -1; + }); + } + // cloned._colIdentifier = sourceDm._colIdentifier.split(',') + // .filter(coll => projectionSet.indexOf(coll) !== 
-1).join(); + cloned._colIdentifier = projectionSet.join(','); + cloned.__calculateFieldspace().calculateFieldsConfig(); + + persistDerivations(sourceDm, cloned, _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].PROJECT, { projField: projField, config: config, actualProjField: projectionSet }, null); + + return cloned; +}; + +var splitWithProject = function splitWithProject(sourceDm, projFieldSet, config, allFields) { + return projFieldSet.map(function (projFields) { + return cloneWithProject(sourceDm, projFields, config, allFields); + }); +}; + +var sanitizeUnitSchema = function sanitizeUnitSchema(unitSchema) { + // Do deep clone of the unit schema as the user might change it later. + unitSchema = Object(_utils__WEBPACK_IMPORTED_MODULE_9__["extend2"])({}, unitSchema); + if (!unitSchema.type) { + unitSchema.type = _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION; + } + + if (!unitSchema.subtype) { + switch (unitSchema.type) { + case _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE: + unitSchema.subtype = _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS; + break; + default: + case _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION: + unitSchema.subtype = _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].CATEGORICAL; + break; + } + } + + return unitSchema; +}; + +var validateUnitSchema = function validateUnitSchema(unitSchema) { + var type = unitSchema.type, + subtype = unitSchema.subtype, + name = unitSchema.name; + + if (type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION || type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE) { + if (!_fields__WEBPACK_IMPORTED_MODULE_8__["fieldRegistry"].has(subtype)) { + throw new Error('DataModel doesn\'t support measure field subtype ' + subtype + ' used for ' + name + ' field'); + } + } else { + throw new Error('DataModel doesn\'t support field type ' + type + ' used for ' + name + ' field'); + } +}; + +var sanitizeAndValidateSchema = 
function sanitizeAndValidateSchema(schema) { + return schema.map(function (unitSchema) { + unitSchema = sanitizeUnitSchema(unitSchema); + validateUnitSchema(unitSchema); + return unitSchema; + }); +}; + +var resolveFieldName = function resolveFieldName(schema, dataHeader) { + schema.forEach(function (unitSchema) { + var fieldNameAs = unitSchema.as; + if (!fieldNameAs) { + return; + } + + var idx = dataHeader.indexOf(unitSchema.name); + dataHeader[idx] = fieldNameAs; + unitSchema.name = fieldNameAs; + delete unitSchema.as; + }); +}; + +var updateData = function updateData(relation, data, schema, options) { + schema = sanitizeAndValidateSchema(schema); + options = Object.assign(Object.assign({}, _default_config__WEBPACK_IMPORTED_MODULE_6__["default"]), options); + var converter = _converter__WEBPACK_IMPORTED_MODULE_7__["converterStore"].get(options.dataFormat); + + if (!converter) { + throw new Error('No converter function found for ' + options.dataFormat + ' format'); + } + + var _converter$convert = converter.convert(data, schema, options), + _converter$convert2 = _slicedToArray(_converter$convert, 2), + header = _converter$convert2[0], + formattedData = _converter$convert2[1]; + + resolveFieldName(schema, header); + var fieldArr = Object(_field_creator__WEBPACK_IMPORTED_MODULE_5__["createFields"])(formattedData, schema, header); + + // This will create a new fieldStore with the fields + var nameSpace = _field_store__WEBPACK_IMPORTED_MODULE_1__["default"].createNamespace(fieldArr, options.name); + relation._partialFieldspace = nameSpace; + + // If data is provided create the default colIdentifier and rowDiffset + relation._rowDiffset = formattedData.length && formattedData[0].length ? '0-' + (formattedData[0].length - 1) : ''; + + // This stores the value objects which is passed to the filter method when selection operation is done. 
+ var valueObjects = []; + var fields = nameSpace.fields; + + var rawFieldsData = fields.map(function (field) { + return field.data(); + }); + var formattedFieldsData = fields.map(function (field) { + return field.formattedData(); + }); + Object(_operator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(relation._rowDiffset, function (i) { + valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i); + }); + nameSpace._cachedValueObjects = valueObjects; + + relation._colIdentifier = schema.map(function (_) { + return _.name; + }).join(); + relation._dataFormat = options.dataFormat === _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].AUTO ? Object(_utils__WEBPACK_IMPORTED_MODULE_9__["detectDataFormat"])(data) : options.dataFormat; + return relation; +}; + +var fieldInSchema = function fieldInSchema(schema, field) { + var i = 0; + + for (; i < schema.length; ++i) { + if (field === schema[i].name) { + return { + name: field, + type: schema[i].subtype || schema[i].type, + index: i + }; + } + } + return null; +}; + +var getDerivationArguments = function getDerivationArguments(derivation) { + var params = []; + var operation = void 0; + operation = derivation.op; + switch (operation) { + case _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].SELECT: + params = [derivation.criteria]; + break; + case _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].PROJECT: + params = [derivation.meta.actualProjField]; + break; + case _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].SORT: + params = [derivation.criteria]; + break; + case _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].GROUPBY: + operation = 'groupBy'; + params = [derivation.meta.groupByString.split(','), derivation.criteria]; + break; + default: + operation = null; + } + + return { + operation: operation, + params: params + }; +}; + +var applyExistingOperationOnModel = function applyExistingOperationOnModel(propModel, dataModel) { + var derivations = 
dataModel.getDerivations(); + var selectionModel = propModel; + + derivations.forEach(function (derivation) { + if (!derivation) { + return; + } + + var _getDerivationArgumen = getDerivationArguments(derivation), + operation = _getDerivationArgumen.operation, + params = _getDerivationArgumen.params; + + if (operation) { + var _selectionModel; + + selectionModel = (_selectionModel = selectionModel)[operation].apply(_selectionModel, _toConsumableArray(params).concat([{ + saveChild: false + }])); + } + }); + + return selectionModel; +}; + +var getFilteredModel = function getFilteredModel(propModel, path) { + for (var i = 0, len = path.length; i < len; i++) { + var model = path[i]; + propModel = applyExistingOperationOnModel(propModel, model); + } + return propModel; +}; + +var propagateIdentifiers = function propagateIdentifiers(dataModel, propModel) { + var config = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; + var propModelInf = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; + + var nonTraversingModel = propModelInf.nonTraversingModel; + var excludeModels = propModelInf.excludeModels || []; + + if (dataModel === nonTraversingModel) { + return; + } + + var propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true; + + propagate && dataModel.handlePropagation(propModel, config); + + var children = dataModel._children; + children.forEach(function (child) { + var selectionModel = applyExistingOperationOnModel(propModel, child); + propagateIdentifiers(child, selectionModel, config, propModelInf); + }); +}; + +var getRootGroupByModel = function getRootGroupByModel(model) { + while (model._parent && model._derivation.find(function (d) { + return d.op !== _constants__WEBPACK_IMPORTED_MODULE_4__["DM_DERIVATIVES"].GROUPBY; + })) { + model = model._parent; + } + return model; +}; + +var getRootDataModel = function getRootDataModel(model) { + while (model._parent) { + model = model._parent; + } + return model; +}; + +var getPathToRootModel = function getPathToRootModel(model) { + var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : []; + + while (model._parent) { + path.push(model); + model = model._parent; + } + return path; +}; + +var propagateToAllDataModels = function propagateToAllDataModels(identifiers, rootModels, propagationInf, config) { + var criteria = void 0; + var propModel = void 0; + var propagationNameSpace = propagationInf.propagationNameSpace, + propagateToSource = propagationInf.propagateToSource; + + var propagationSourceId = propagationInf.sourceId; + var propagateInterpolatedValues = config.propagateInterpolatedValues; + var filterFn = function filterFn(entry) { + var filter = config.filterFn || function () { + return true; + }; + return filter(entry, config); + }; + + var criterias = []; + + if (identifiers === null && config.persistent !== true) { + criterias = [{ + criteria: [] + }]; + criteria = []; + } else { + var _ref5; + + var actionCriterias = Object.values(propagationNameSpace.mutableActions); + if (propagateToSource !== false) { + actionCriterias = actionCriterias.filter(function (d) { + return d.config.sourceId !== propagationSourceId; + }); + } + + var filteredCriteria = 
actionCriterias.filter(filterFn).map(function (action) { + return action.config.criteria; + }); + + var excludeModels = []; + + if (propagateToSource !== false) { + var sourceActionCriterias = Object.values(propagationNameSpace.mutableActions); + + sourceActionCriterias.forEach(function (actionInf) { + var actionConf = actionInf.config; + if (actionConf.applyOnSource === false && actionConf.action === config.action && actionConf.sourceId !== propagationSourceId) { + excludeModels.push(actionInf.model); + criteria = sourceActionCriterias.filter(function (d) { + return d !== actionInf; + }).map(function (d) { + return d.config.criteria; + }); + criteria.length && criterias.push({ + criteria: criteria, + models: actionInf.model, + path: getPathToRootModel(actionInf.model) + }); + } + }); + } + + criteria = (_ref5 = []).concat.apply(_ref5, [].concat(_toConsumableArray(filteredCriteria), [identifiers])).filter(function (d) { + return d !== null; + }); + criterias.push({ + criteria: criteria, + excludeModels: [].concat(excludeModels, _toConsumableArray(config.excludeModels || [])) + }); + } + + var rootModel = rootModels.model; + + var propConfig = Object.assign({ + sourceIdentifiers: identifiers, + propagationSourceId: propagationSourceId + }, config); + + var rootGroupByModel = rootModels.groupByModel; + if (propagateInterpolatedValues && rootGroupByModel) { + propModel = filterPropagationModel(rootGroupByModel, criteria, { + filterByMeasure: propagateInterpolatedValues + }); + propagateIdentifiers(rootGroupByModel, propModel, propConfig); + } + + criterias.forEach(function (inf) { + var propagationModel = filterPropagationModel(rootModel, inf.criteria); + var path = inf.path; + + if (path) { + var filteredModel = getFilteredModel(propagationModel, path.reverse()); + inf.models.handlePropagation(filteredModel, propConfig); + } else { + propagateIdentifiers(rootModel, propagationModel, propConfig, { + excludeModels: inf.excludeModels, + nonTraversingModel: 
propagateInterpolatedValues && rootGroupByModel + }); + } + }); +}; + +var propagateImmutableActions = function propagateImmutableActions(propagationNameSpace, rootModels, propagationInf) { + var immutableActions = propagationNameSpace.immutableActions; + + for (var action in immutableActions) { + var actionInf = immutableActions[action]; + var actionConf = actionInf.config; + var propagationSourceId = propagationInf.config.sourceId; + var filterImmutableAction = propagationInf.propConfig.filterImmutableAction ? propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true; + if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) { + var criteriaModel = actionConf.criteria; + propagateToAllDataModels(criteriaModel, rootModels, { + propagationNameSpace: propagationNameSpace, + propagateToSource: false, + sourceId: propagationSourceId + }, actionConf); + } + } +}; + +var addToPropNamespace = function addToPropNamespace(propagationNameSpace) { + var config = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + var model = arguments[2]; + + var sourceNamespace = void 0; + var isMutableAction = config.isMutableAction; + var criteria = config.criteria; + var key = config.action + '-' + config.sourceId; + + if (isMutableAction) { + sourceNamespace = propagationNameSpace.mutableActions; + } else { + sourceNamespace = propagationNameSpace.immutableActions; + } + + if (criteria === null) { + delete sourceNamespace[key]; + } else { + sourceNamespace[key] = { + model: model, + config: config + }; + } + + return _this; +}; + +var getNormalizedProFields = function getNormalizedProFields(projField, allFields, fieldConfig) { + var normalizedProjField = projField.reduce(function (acc, field) { + if (field.constructor.name === 'RegExp') { + acc.push.apply(acc, _toConsumableArray(allFields.filter(function (fieldName) { + return fieldName.search(field) !== -1; + }))); + } else if (field in fieldConfig) { + acc.push(field); + } + return acc; + }, []); + return Array.from(new Set(normalizedProjField)).map(function (field) { + return field.trim(); + }); +}; + +/** + * Get the numberFormatted value if numberFormat present, + * else returns the supplied value. + * @param {Object} field Field Instance + * @param {Number|String} value + * @return {Number|String} + */ +var getNumberFormattedVal = function getNumberFormattedVal(field, value) { + if (field.numberFormat) { + return field.numberFormat()(value); + } + return value; +}; + +/***/ }), + +/***/ "./src/index.js": +/*!**********************!*\ + !*** ./src/index.js ***! + \**********************/ +/*! no static exports found */ +/***/ (function(module, exports, __webpack_require__) { + +var DataModel = __webpack_require__(/*! ./export */ "./src/export.js"); + +module.exports = DataModel.default ? DataModel.default : DataModel; + +/***/ }), + +/***/ "./src/invalid-aware-types.js": +/*!************************************!*\ + !*** ./src/invalid-aware-types.js ***! + \************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +/** + * A parser to parser null, undefined, invalid and NIL values. + * + * @public + * @class + */ +var InvalidAwareTypes = function () { + _createClass(InvalidAwareTypes, null, [{ + key: 'invalidAwareVals', + + /** + * Static method which gets/sets the invalid value registry. + * + * @public + * @param {Object} config - The custom configuration supplied by user. + * @return {Object} Returns the invalid values registry. + */ + value: function invalidAwareVals(config) { + if (!config) { + return InvalidAwareTypes._invalidAwareValsMap; + } + return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config); + } + + /** + * Initialize a new instance. + * + * @public + * @param {string} value - The value of the invalid data type. + */ + + }]); + + function InvalidAwareTypes(value) { + _classCallCheck(this, InvalidAwareTypes); + + this._value = value; + } + + /** + * Returns the current value of the instance. + * + * @public + * @return {string} Returns the value of the invalid data type. 
+ */ + + + _createClass(InvalidAwareTypes, [{ + key: 'value', + value: function value() { + return this._value; + } + + /** + * Returns the current value of the instance in string format. + * + * @public + * @return {string} Returns the value of the invalid data type. + */ + + }, { + key: 'toString', + value: function toString() { + return String(this._value); + } + }], [{ + key: 'isInvalid', + value: function isInvalid(val) { + return val instanceof InvalidAwareTypes || !!InvalidAwareTypes.invalidAwareVals()[val]; + } + }, { + key: 'getInvalidType', + value: function getInvalidType(val) { + return val instanceof InvalidAwareTypes ? val : InvalidAwareTypes.invalidAwareVals()[val]; + } + }]); + + return InvalidAwareTypes; +}(); + +/** + * Enums for Invalid types. + */ + + +InvalidAwareTypes.NULL = new InvalidAwareTypes('null'); +InvalidAwareTypes.NA = new InvalidAwareTypes('na'); +InvalidAwareTypes.NIL = new InvalidAwareTypes('nil'); + +/** + * Default Registry for mapping the invalid values. + * + * @private + */ +InvalidAwareTypes._invalidAwareValsMap = { + invalid: InvalidAwareTypes.NA, + nil: InvalidAwareTypes.NIL, + null: InvalidAwareTypes.NULL, + undefined: InvalidAwareTypes.NA +}; + +/* harmony default export */ __webpack_exports__["default"] = (InvalidAwareTypes); + +/***/ }), + +/***/ "./src/operator/bucket-creator.js": +/*!****************************************!*\ + !*** ./src/operator/bucket-creator.js ***! + \****************************************/ +/*! exports provided: createBinnedFieldData */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "createBinnedFieldData", function() { return createBinnedFieldData; }); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../invalid-aware-types */ "./src/invalid-aware-types.js"); +var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); + + + + +var generateBuckets = function generateBuckets(binSize, start, end) { + var buckets = []; + var next = start; + + while (next < end) { + buckets.push(next); + next += binSize; + } + buckets.push(next); + + return buckets; +}; + +var findBucketRange = function findBucketRange(bucketRanges, value) { + var leftIdx = 0; + var rightIdx = bucketRanges.length - 1; + var midIdx = void 0; + var range = void 0; + + // Here use binary search as the bucketRanges is a sorted array + while (leftIdx <= rightIdx) { + midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2); + range = bucketRanges[midIdx]; + + if (value >= range.start && value < range.end) { + return range; + } else if (value >= range.end) { + leftIdx = midIdx + 1; + } else if (value < range.start) { + rightIdx = midIdx - 1; + } + } + + return null; +}; + +/** + * Creates the bin data from input measure field and supplied configs. + * + * @param {Measure} measureField - The Measure field instance. + * @param {string} rowDiffset - The datamodel rowDiffset values. + * @param {Object} config - The config object. 
+ * @return {Object} Returns the binned data and the corresponding bins. + */ +function createBinnedFieldData(measureField, rowDiffset, config) { + var buckets = config.buckets, + binsCount = config.binsCount, + binSize = config.binSize, + start = config.start, + end = config.end; + + var _measureField$domain = measureField.domain(), + _measureField$domain2 = _slicedToArray(_measureField$domain, 2), + dMin = _measureField$domain2[0], + dMax = _measureField$domain2[1]; + + if (!buckets) { + start = start !== 0 && (!start || start > dMin) ? dMin : start; + end = end !== 0 && (!end || end < dMax) ? dMax + 1 : end; + + if (binsCount) { + binSize = Math.ceil(Math.abs(end - start) / binsCount); + } + + buckets = generateBuckets(binSize, start, end); + } + + if (buckets[0] > dMin) { + buckets.unshift(dMin); + } + if (buckets[buckets.length - 1] <= dMax) { + buckets.push(dMax + 1); + } + + var bucketRanges = []; + for (var i = 0; i < buckets.length - 1; i++) { + bucketRanges.push({ + start: buckets[i], + end: buckets[i + 1] + }); + } + + var binnedData = []; + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(rowDiffset, function (i) { + var datum = measureField.partialField.data[i]; + if (datum instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"]) { + binnedData.push(datum); + return; + } + + var range = findBucketRange(bucketRanges, datum); + binnedData.push(range.start + '-' + range.end); + }); + + return { binnedData: binnedData, bins: buckets }; +} + +/***/ }), + +/***/ "./src/operator/compose.js": +/*!*********************************!*\ + !*** ./src/operator/compose.js ***! + \*********************************/ +/*! 
exports provided: select, project, bin, groupBy, compose */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "select", function() { return select; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "project", function() { return project; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "bin", function() { return bin; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "groupBy", function() { return groupBy; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "compose", function() { return compose; }); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../helper */ "./src/helper.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../constants */ "./src/constants/index.js"); +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + + +/** + * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable + * operators are called on the instances of {@link Datamodel} and {@link Relation} class. + * + * Those same operators can be used as composable operators from `DataModel.Operators` namespace. + * + * All these operators have similar behaviour. All these operators when called with the argument returns a function + * which expects a DataModel instance. + * + * @public + * @module Operators + * @namespace DataModel + */ + +/** + * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation. 
+ * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function. + * The returned function is called with the DataModel instance on which the action needs to be performed. + * + * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection + * function is called for each row of DataModel instance with the current row passed as argument. + * + * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry + * of rejection set. + * + * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the + * resulatant datamodel. + * + * @warning + * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose. + * + * @error + * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the + * chained version. + * + * @example + * const select = DataModel.Operators.select; + * usaCarsFn = select(fields => fields.Origin.value === 'USA'); + * usaCarsDm = usaCarsFn(dm); + * console.log(usaCarsDm); + * + * @public + * @namespace DataModel + * @module Operators + * + * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row + * ``` + * function (row, i) { ... } + * ``` + * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant + * DataModel instance + * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection + * + * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be + * applied. 
+ */ +var select = function select() { + for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + return function (dm) { + return dm.select.apply(dm, args); + }; +}; + +/** + * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering + * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on + * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs + * to be performed. + * + * Projection expects array of fields name based on which it creates the selection and rejection set. All the field + * whose name is present in array goes in selection set and rest of the fields goes in rejection set. + * + * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the + * resulatant datamodel. + * + * @warning + * Selection and rejection set is only a logical idea for concept explanation purpose. + * + * @error + * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the + * chained version. + * + * @public + * @namespace DataModel + * @module Operators + * + * @param {Array.} projField - An array of column names in string or regular expression. + * @param {Object} [config] - An optional config to control the creation of new DataModel + * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection + * + * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be + * applied. 
+ */ +var project = function project() { + for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { + args[_key2] = arguments[_key2]; + } + + return function (dm) { + return dm.project.apply(dm, args); + }; +}; + +/** + * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration. + * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds + * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what + * binning is and the use case. + * + * Binning can be configured by + * - providing custom bin configuration with non uniform buckets + * - providing bin count + * - providing each bin size + * + * When custom buckets are provided as part of binning configuration + * @example + * // DataModel already prepared and assigned to dm vairable + * const buckets = { + * start: 30 + * stops: [80, 100, 110] + * }; + * const config = { buckets, name: 'binnedHP' } + * const binFn = bin('horsepower', config); + * const binnedDm = binFn(dm); + * + * @text + * When `binCount` is defined as part of binning configuration + * @example + * // DataModel already prepared and assigned to dm vairable + * const config = { binCount: 5, name: 'binnedHP' } + * const binFn = bin('horsepower', config); + * const binnedDm = binFn(Dm); + * + * @text + * When `binSize` is defined as part of binning configuration + * @example + * // DataModel already prepared and assigned to dm vairable + * const config = { binSize: 200, name: 'binnedHorsepower' } + * const binnedDm = dataModel.bin('horsepower', config); + * const binnedDm = binFn(Dm); + * + * @public + * @namespace DataModel + * @module Operators + * + * @param {String} name Name of measure which will be used to create bin + * @param {Object} config Config required for bin creation + * @param {Array.} config.bucketObj.stops Defination of 
bucket ranges. Two subsequent number from arrays + * are picked and a range is created. The first number from range is inclusive and the second number from range + * is exclusive. + * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number. + * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its + * the first value of the stop. + * @param {Number} config.binSize Bucket size for each bin + * @param {Number} config.binCount Number of bins which will be created + * @param {String} config.name Name of the new binned field to be created + * + * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be + * applied. + */ +var bin = function bin() { + for (var _len3 = arguments.length, args = Array(_len3), _key3 = 0; _key3 < _len3; _key3++) { + args[_key3] = arguments[_key3]; + } + + return function (dm) { + return dm.bin.apply(dm, args); + }; +}; + +/** + * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing + * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce + * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind + * groupBy. + * + * DataModel by default provides definition of few {@link reducer | Reducers}. + * {@link ReducerStore | User defined reducers} can also be registered. + * + * This is the chained implementation of `groupBy`. 
+ * `groupBy` also supports {@link link_to_compose_groupBy | composability} + * + * @example + * const groupBy = DataModel.Operators.groupBy; + * const groupedFn = groupBy(['Year'], { horsepower: 'max' } ); + * groupedDM = groupByFn(dm); + * + * @public + * + * @param {Array.} fieldsArr - Array containing the name of dimensions + * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its + * not passed, or any variable is ommitted from the object, default aggregation function is used from the + * schema of the variable. + * + * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be + * applied. + */ +var groupBy = function groupBy() { + for (var _len4 = arguments.length, args = Array(_len4), _key4 = 0; _key4 < _len4; _key4++) { + args[_key4] = arguments[_key4]; + } + + return function (dm) { + return dm.groupBy.apply(dm, args); + }; +}; + +/** + * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel. + * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in + * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix). 
+ * + * Suported operations in compose are + * - `select` + * - `project` + * - `groupBy` + * - `bin` + * - `compose` + * + * @example + * const compose = DataModel.Operators.compose; + * const select = DataModel.Operators.select; + * const project = DataModel.Operators.project; + * + * let composedFn = compose( + * select(fields => fields.netprofit.value <= 15), + * project(['netprofit', 'netsales'])); + * + * const dataModel = new DataModel(data1, schema1); + * + * let composedDm = composedFn(dataModel); + * + * @public + * @namespace DataModel + * @module Operators + * + * @param {Array.} operators: An array of operation that will be applied on the + * datatable. + * + * @returns {DataModel} Instance of resultant DataModel + */ +var compose = function compose() { + for (var _len5 = arguments.length, operations = Array(_len5), _key5 = 0; _key5 < _len5; _key5++) { + operations[_key5] = arguments[_key5]; + } + + return function (dm) { + var config = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : { saveChild: true }; + + var currentDM = dm; + var firstChild = void 0; + var derivations = []; + + operations.forEach(function (operation) { + currentDM = operation(currentDM); + derivations.push.apply(derivations, _toConsumableArray(currentDM._derivation)); + if (!firstChild) { + firstChild = currentDM; + } + }); + + if (firstChild && firstChild !== currentDM) { + firstChild.dispose(); + } + + // reset all ancestorDerivation saved in-between compose + currentDM._ancestorDerivation = []; + Object(_helper__WEBPACK_IMPORTED_MODULE_0__["persistDerivations"])(dm, currentDM, _constants__WEBPACK_IMPORTED_MODULE_1__["DM_DERIVATIVES"].COMPOSE, null, derivations); + + if (config.saveChild) { + currentDM.setParent(dm); + } else { + currentDM.setParent(null); + } + + return currentDM; + }; +}; + +/***/ }), + +/***/ "./src/operator/cross-product.js": +/*!***************************************!*\ + !*** ./src/operator/cross-product.js ***! 
+ \***************************************/ +/*! exports provided: crossProduct */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "crossProduct", function() { return crossProduct; }); +/* harmony import */ var _datamodel__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../datamodel */ "./src/datamodel.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils */ "./src/utils/index.js"); +/* harmony import */ var _get_common_schema__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./get-common-schema */ "./src/operator/get-common-schema.js"); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../constants */ "./src/constants/index.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../helper */ "./src/helper.js"); + + + + + + +/** + * Default filter function for crossProduct. + * + * @return {boolean} Always returns true. + */ +function defaultFilterFn() { + return true; +} + +/** + * Implementation of cross product operation between two DataModel instances. + * It internally creates the data and schema for the new DataModel. + * + * @param {DataModel} dataModel1 - The left DataModel instance. + * @param {DataModel} dataModel2 - The right DataModel instance. + * @param {Function} filterFn - The filter function which is used to filter the tuples. + * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there. + * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation. 
+ */ +function crossProduct(dm1, dm2, filterFn) { + var replaceCommonSchema = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false; + var jointype = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : _constants__WEBPACK_IMPORTED_MODULE_4__["JOINS"].CROSS; + + var schema = []; + var data = []; + var applicableFilterFn = filterFn || defaultFilterFn; + var dm1FieldStore = dm1.getFieldspace(); + var dm2FieldStore = dm2.getFieldspace(); + var dm1FieldStoreName = dm1FieldStore.name; + var dm2FieldStoreName = dm2FieldStore.name; + var name = dm1FieldStore.name + '.' + dm2FieldStore.name; + var commonSchemaList = Object(_get_common_schema__WEBPACK_IMPORTED_MODULE_2__["getCommonSchema"])(dm1FieldStore, dm2FieldStore); + + if (dm1FieldStoreName === dm2FieldStoreName) { + throw new Error('DataModels must have different alias names'); + } + // Here prepare the schema + dm1FieldStore.fields.forEach(function (field) { + var tmpSchema = Object(_utils__WEBPACK_IMPORTED_MODULE_1__["extend2"])({}, field.schema()); + if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) { + tmpSchema.name = dm1FieldStore.name + '.' + tmpSchema.name; + } + schema.push(tmpSchema); + }); + dm2FieldStore.fields.forEach(function (field) { + var tmpSchema = Object(_utils__WEBPACK_IMPORTED_MODULE_1__["extend2"])({}, field.schema()); + if (commonSchemaList.indexOf(tmpSchema.name) !== -1) { + if (!replaceCommonSchema) { + tmpSchema.name = dm2FieldStore.name + '.' 
+ tmpSchema.name; + schema.push(tmpSchema); + } + } else { + schema.push(tmpSchema); + } + }); + + // Here prepare Data + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(dm1._rowDiffset, function (i) { + var rowAdded = false; + var rowPosition = void 0; + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_3__["rowDiffsetIterator"])(dm2._rowDiffset, function (ii) { + var tuple = []; + var userArg = {}; + userArg[dm1FieldStoreName] = {}; + userArg[dm2FieldStoreName] = {}; + dm1FieldStore.fields.forEach(function (field) { + tuple.push(field.partialField.data[i]); + userArg[dm1FieldStoreName][field.name()] = { + rawValue: field.partialField.data[i], + formattedValue: field.formattedData()[i] + }; + }); + dm2FieldStore.fields.forEach(function (field) { + if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) { + tuple.push(field.partialField.data[ii]); + } + userArg[dm2FieldStoreName][field.name()] = { + rawValue: field.partialField.data[ii], + formattedValue: field.formattedData()[ii] + }; + }); + + var cachedStore = {}; + var cloneProvider1 = function cloneProvider1() { + return dm1.detachedRoot(); + }; + var cloneProvider2 = function cloneProvider2() { + return dm2.detachedRoot(); + }; + + var dm1Fields = Object(_helper__WEBPACK_IMPORTED_MODULE_5__["prepareJoinData"])(userArg[dm1FieldStoreName]); + var dm2Fields = Object(_helper__WEBPACK_IMPORTED_MODULE_5__["prepareJoinData"])(userArg[dm2FieldStoreName]); + if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) { + var tupleObj = {}; + tuple.forEach(function (cellVal, iii) { + tupleObj[schema[iii].name] = cellVal; + }); + if (rowAdded && _constants__WEBPACK_IMPORTED_MODULE_4__["JOINS"].CROSS !== jointype) { + data[rowPosition] = tupleObj; + } else { + data.push(tupleObj); + rowAdded = true; + rowPosition = i; + } + } else if ((jointype === _constants__WEBPACK_IMPORTED_MODULE_4__["JOINS"].LEFTOUTER || jointype === 
_constants__WEBPACK_IMPORTED_MODULE_4__["JOINS"].RIGHTOUTER) && !rowAdded) { + var _tupleObj = {}; + var len = dm1FieldStore.fields.length - 1; + tuple.forEach(function (cellVal, iii) { + if (iii <= len) { + _tupleObj[schema[iii].name] = cellVal; + } else { + _tupleObj[schema[iii].name] = null; + } + }); + rowAdded = true; + rowPosition = i; + data.push(_tupleObj); + } + }); + }); + + return new _datamodel__WEBPACK_IMPORTED_MODULE_0__["default"](data, schema, { name: name }); +} + +/***/ }), + +/***/ "./src/operator/data-builder.js": +/*!**************************************!*\ + !*** ./src/operator/data-builder.js ***! + \**************************************/ +/*! exports provided: dataBuilder */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "dataBuilder", function() { return dataBuilder; }); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _sort__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./sort */ "./src/operator/sort.js"); +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + + +/** + * Builds the actual data array. + * + * @param {Array} fieldStore - An array of field. + * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6'; + * @param {string} colIdentifier - A string consisting of the details of which column + * to be included eg 'date,sales,profit'; + * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance. + * @param {Object} options - The options required to create the type of the data. 
+ * @return {Object} Returns an object containing the multidimensional array and the relative schema. + */ +function dataBuilder(fieldStore, rowDiffset, colIdentifier, sortingDetails, options) { + var defOptions = { + addUid: false, + columnWise: false + }; + options = Object.assign({}, defOptions, options); + + var retObj = { + schema: [], + data: [], + uids: [] + }; + var addUid = options.addUid; + var reqSorting = sortingDetails && sortingDetails.length > 0; + // It stores the fields according to the colIdentifier argument + var tmpDataArr = []; + // Stores the fields according to the colIdentifier argument + var colIArr = colIdentifier.split(','); + + colIArr.forEach(function (colName) { + for (var i = 0; i < fieldStore.length; i += 1) { + if (fieldStore[i].name() === colName) { + tmpDataArr.push(fieldStore[i]); + break; + } + } + }); + + // Inserts the schema to the schema object + tmpDataArr.forEach(function (field) { + /** @todo Need to use extend2 here otherwise user can overwrite the schema. 
*/ + retObj.schema.push(field.schema()); + }); + + if (addUid) { + retObj.schema.push({ + name: 'uid', + type: 'identifier' + }); + } + + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(rowDiffset, function (i) { + retObj.data.push([]); + var insertInd = retObj.data.length - 1; + var start = 0; + tmpDataArr.forEach(function (field, ii) { + retObj.data[insertInd][ii + start] = field.partialField.data[i]; + }); + if (addUid) { + retObj.data[insertInd][tmpDataArr.length] = i; + } + // Creates an array of unique identifiers for each row + retObj.uids.push(i); + + // If sorting needed then there is the need to expose the index + // mapping from the old index to its new index + if (reqSorting) { + retObj.data[insertInd].push(i); + } + }); + + // Handles the sort functionality + if (reqSorting) { + Object(_sort__WEBPACK_IMPORTED_MODULE_1__["sortData"])(retObj, sortingDetails); + } + + if (options.columnWise) { + var tmpData = Array.apply(undefined, _toConsumableArray(Array(retObj.schema.length))).map(function () { + return []; + }); + retObj.data.forEach(function (tuple) { + tuple.forEach(function (data, i) { + tmpData[i].push(data); + }); + }); + retObj.data = tmpData; + } + + return retObj; +} + +/***/ }), + +/***/ "./src/operator/difference.js": +/*!************************************!*\ + !*** ./src/operator/difference.js ***! + \************************************/ +/*! exports provided: difference */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "difference", function() { return difference; }); +/* harmony import */ var _datamodel__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../datamodel */ "./src/datamodel.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! 
../utils */ "./src/utils/index.js"); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _utils_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/helper */ "./src/utils/helper.js"); + + + + + +/** + * Performs the union operation between two dm instances. + * + * @todo Fix the conflicts between union and difference terminology here. + * + * @param {dm} dm1 - The first dm instance. + * @param {dm} dm2 - The second dm instance. + * @return {dm} Returns the newly created dm after union operation. + */ +function difference(dm1, dm2) { + var hashTable = {}; + var schema = []; + var schemaNameArr = []; + var data = []; + var dm1FieldStore = dm1.getFieldspace(); + var dm2FieldStore = dm2.getFieldspace(); + var dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj(); + var dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj(); + var name = dm1FieldStore.name + ' union ' + dm2FieldStore.name; + + // For union the columns should match otherwise return a clone of the dm1 + if (!Object(_utils_helper__WEBPACK_IMPORTED_MODULE_3__["isArrEqual"])(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) { + return null; + } + + // Prepare the schema + dm1._colIdentifier.split(',').forEach(function (fieldName) { + var field = dm1FieldStoreFieldObj[fieldName]; + schema.push(Object(_utils__WEBPACK_IMPORTED_MODULE_1__["extend2"])({}, field.schema())); + schemaNameArr.push(field.schema().name); + }); + + /** + * The helper function to create the data. + * + * @param {dm} dm - The dm instance for which the data is inserted. + * @param {Object} fieldsObj - The fieldStore object format. + * @param {boolean} addData - If true only tuple will be added to the data. 
+ */ + function prepareDataHelper(dm, fieldsObj, addData) { + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_2__["rowDiffsetIterator"])(dm._rowDiffset, function (i) { + var tuple = {}; + var hashData = ''; + schemaNameArr.forEach(function (schemaName) { + var value = fieldsObj[schemaName].partialField.data[i]; + hashData += '-' + value; + tuple[schemaName] = value; + }); + if (!hashTable[hashData]) { + if (addData) { + data.push(tuple); + } + hashTable[hashData] = true; + } + }); + } + + // Prepare the data + prepareDataHelper(dm2, dm2FieldStoreFieldObj, false); + prepareDataHelper(dm1, dm1FieldStoreFieldObj, true); + + return new _datamodel__WEBPACK_IMPORTED_MODULE_0__["default"](data, schema, { name: name }); +} + +/***/ }), + +/***/ "./src/operator/get-common-schema.js": +/*!*******************************************!*\ + !*** ./src/operator/get-common-schema.js ***! + \*******************************************/ +/*! exports provided: getCommonSchema */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getCommonSchema", function() { return getCommonSchema; }); +/** + * The helper function that returns an array of common schema + * from two fieldStore instances. + * + * @param {FieldStore} fs1 - The first FieldStore instance. + * @param {FieldStore} fs2 - The second FieldStore instance. + * @return {Array} An array containing the common schema. 
+ */ +function getCommonSchema(fs1, fs2) { + var retArr = []; + var fs1Arr = []; + fs1.fields.forEach(function (field) { + fs1Arr.push(field.schema().name); + }); + fs2.fields.forEach(function (field) { + if (fs1Arr.indexOf(field.schema().name) !== -1) { + retArr.push(field.schema().name); + } + }); + return retArr; +} + +/***/ }), + +/***/ "./src/operator/group-by-function.js": +/*!*******************************************!*\ + !*** ./src/operator/group-by-function.js ***! + \*******************************************/ +/*! exports provided: defaultReducerName, defReducer, fnList */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "defaultReducerName", function() { return defaultReducerName; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "defReducer", function() { return sum; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fnList", function() { return fnList; }); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils */ "./src/utils/index.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../invalid-aware-types */ "./src/invalid-aware-types.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
../enums */ "./src/enums/index.js"); +var _fnList; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + + + +var SUM = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].SUM, + AVG = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].AVG, + FIRST = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].FIRST, + LAST = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].LAST, + COUNT = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].COUNT, + STD = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].STD, + MIN = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].MIN, + MAX = _enums__WEBPACK_IMPORTED_MODULE_2__["GROUP_BY_FUNCTIONS"].MAX; + + +function getFilteredValues(arr) { + return arr.filter(function (item) { + return !(item instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"]); + }); +} +/** + * Reducer function that returns the sum of all the values. + * + * @public + * @param {Array.} arr - The input array. + * @return {number} Returns the sum of the array. + */ +function sum(arr) { + if (Object(_utils__WEBPACK_IMPORTED_MODULE_0__["isArray"])(arr) && !(arr[0] instanceof Array)) { + var filteredNumber = getFilteredValues(arr); + var totalSum = filteredNumber.length ? filteredNumber.reduce(function (acc, curr) { + return acc + curr; + }, 0) : _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; + return totalSum; + } + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; +} + +/** + * Reducer function that returns the average of all the values. + * + * @public + * @param {Array.} arr - The input array. 
+ * @return {number} Returns the mean value of the array. + */ +function avg(arr) { + if (Object(_utils__WEBPACK_IMPORTED_MODULE_0__["isArray"])(arr) && !(arr[0] instanceof Array)) { + var totalSum = sum(arr); + var len = arr.length || 1; + return Number.isNaN(totalSum) || totalSum instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"] ? _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL : totalSum / len; + } + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; +} + +/** + * Reducer function that gives the min value amongst all the values. + * + * @public + * @param {Array.} arr - The input array. + * @return {number} Returns the minimum value of the array. + */ +function min(arr) { + if (Object(_utils__WEBPACK_IMPORTED_MODULE_0__["isArray"])(arr) && !(arr[0] instanceof Array)) { + // Filter out undefined, null and NaN values + var filteredValues = getFilteredValues(arr); + + return filteredValues.length ? Math.min.apply(Math, _toConsumableArray(filteredValues)) : _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; + } + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; +} + +/** + * Reducer function that gives the max value amongst all the values. + * + * @public + * @param {Array.} arr - The input array. + * @return {number} Returns the maximum value of the array. + */ +function max(arr) { + if (Object(_utils__WEBPACK_IMPORTED_MODULE_0__["isArray"])(arr) && !(arr[0] instanceof Array)) { + // Filter out undefined, null and NaN values + var filteredValues = getFilteredValues(arr); + + return filteredValues.length ? Math.max.apply(Math, _toConsumableArray(filteredValues)) : _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; + } + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; +} + +/** + * Reducer function that gives the first value of the array. + * + * @public + * @param {Array} arr - The input array. 
+ * @return {number} Returns the first value of the array. + */ +function first(arr) { + return arr[0]; +} + +/** + * Reducer function that gives the last value of the array. + * + * @public + * @param {Array} arr - The input array. + * @return {number} Returns the last value of the array. + */ +function last(arr) { + return arr[arr.length - 1]; +} + +/** + * Reducer function that gives the count value of the array. + * + * @public + * @param {Array} arr - The input array. + * @return {number} Returns the length of the array. + */ +function count(arr) { + if (Object(_utils__WEBPACK_IMPORTED_MODULE_0__["isArray"])(arr)) { + return arr.length; + } + return _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"].NULL; +} + +/** + * Calculates the variance of the input array. + * + * @param {Array.} arr - The input array. + * @return {number} Returns the variance of the input array. + */ +function variance(arr) { + var mean = avg(arr); + return avg(arr.map(function (num) { + return Math.pow(num - mean, 2); + })); +} + +/** + * Calculates the square root of the variance of the input array. + * + * @public + * @param {Array.} arr - The input array. + * @return {number} Returns the square root of the variance. + */ +function std(arr) { + return Math.sqrt(variance(arr)); +} + +var fnList = (_fnList = {}, _defineProperty(_fnList, SUM, sum), _defineProperty(_fnList, AVG, avg), _defineProperty(_fnList, MIN, min), _defineProperty(_fnList, MAX, max), _defineProperty(_fnList, FIRST, first), _defineProperty(_fnList, LAST, last), _defineProperty(_fnList, COUNT, count), _defineProperty(_fnList, STD, std), _fnList); + +var defaultReducerName = SUM; + + + +/***/ }), + +/***/ "./src/operator/group-by.js": +/*!**********************************!*\ + !*** ./src/operator/group-by.js ***! + \**********************************/ +/*! 
exports provided: groupBy, getFieldArr, getReducerObj */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "groupBy", function() { return groupBy; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getFieldArr", function() { return getFieldArr; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getReducerObj", function() { return getReducerObj; }); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../utils */ "./src/utils/index.js"); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _export__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../export */ "./src/export.js"); +/* harmony import */ var _utils_reducer_store__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/reducer-store */ "./src/utils/reducer-store.js"); +/* harmony import */ var _group_by_function__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./group-by-function */ "./src/operator/group-by-function.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! 
../enums */ "./src/enums/index.js"); +var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); + + + + + + + + +/** + * This function sanitize the user given field and return a common Array structure field + * list + * @param {DataModel} dataModel the dataModel operating on + * @param {Array} fieldArr user input of field Array + * @return {Array} arrays of field name + */ +function getFieldArr(dataModel, fieldArr) { + var retArr = []; + var fieldStore = dataModel.getFieldspace(); + var dimensions = fieldStore.getDimension(); + + Object.entries(dimensions).forEach(function (_ref) { + var _ref2 = _slicedToArray(_ref, 1), + key = _ref2[0]; + + if (fieldArr && fieldArr.length) { + if (fieldArr.indexOf(key) !== -1) { + retArr.push(key); + } + } else { + retArr.push(key); + } + }); + + return retArr; +} + +/** + * This sanitize the reducer provide by the user and create a common type of object. + * user can give function Also + * @param {DataModel} dataModel dataModel to worked on + * @param {Object|function} [reducers={}] reducer provided by the users + * @return {Object} object containing reducer function for every measure + */ +function getReducerObj(dataModel) { + var reducers = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + var retObj = {}; + var fieldStore = dataModel.getFieldspace(); + var measures = fieldStore.getMeasure(); + var defReducer = _utils_reducer_store__WEBPACK_IMPORTED_MODULE_3__["default"].defaultReducer(); + + Object.keys(measures).forEach(function (measureName) { + if (typeof reducers[measureName] !== 'string') { + reducers[measureName] = measures[measureName].defAggFn(); + } + var reducerFn = _utils_reducer_store__WEBPACK_IMPORTED_MODULE_3__["default"].resolve(reducers[measureName]); + if (reducerFn) { + retObj[measureName] = reducerFn; + } else { + retObj[measureName] = defReducer; + reducers[measureName] = _group_by_function__WEBPACK_IMPORTED_MODULE_4__["defaultReducerName"]; + } + }); + return retObj; +} + +/** + * main function which perform the group-by operations which reduce the measures value is the + * fields are common according to the reducer function provided + * @param {DataModel} dataModel the dataModel to worked + * @param {Array} fieldArr fields according to which the groupby should be worked + * @param {Object|Function} reducers reducers function + * @param {DataModel} existingDataModel Existing datamodel instance + * @return {DataModel} new dataModel with the group by + */ +function groupBy(dataModel, fieldArr, reducers, existingDataModel) { + var sFieldArr = getFieldArr(dataModel, fieldArr); + var reducerObj = getReducerObj(dataModel, reducers); + var fieldStore = dataModel.getFieldspace(); + var fieldStoreObj = fieldStore.fieldsObj(); + var dbName = fieldStore.name; + var dimensionArr = []; + var measureArr = []; + var schema = []; + var hashMap = {}; + var data = []; + var newDataModel = void 0; + + // Prepare the schema + Object.entries(fieldStoreObj).forEach(function (_ref3) { + var _ref4 = _slicedToArray(_ref3, 2), + key = _ref4[0], + value = _ref4[1]; + + if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) { + schema.push(Object(_utils__WEBPACK_IMPORTED_MODULE_0__["extend2"])({}, value.schema())); + + switch 
(value.schema().type) { + case _enums__WEBPACK_IMPORTED_MODULE_5__["FieldType"].MEASURE: + measureArr.push(key); + break; + default: + case _enums__WEBPACK_IMPORTED_MODULE_5__["FieldType"].DIMENSION: + dimensionArr.push(key); + } + } + }); + // Prepare the data + var rowCount = 0; + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_1__["rowDiffsetIterator"])(dataModel._rowDiffset, function (i) { + var hash = ''; + dimensionArr.forEach(function (_) { + hash = hash + '-' + fieldStoreObj[_].partialField.data[i]; + }); + if (hashMap[hash] === undefined) { + hashMap[hash] = rowCount; + data.push({}); + dimensionArr.forEach(function (_) { + data[rowCount][_] = fieldStoreObj[_].partialField.data[i]; + }); + measureArr.forEach(function (_) { + data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]]; + }); + rowCount += 1; + } else { + measureArr.forEach(function (_) { + data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]); + }); + } + }); + + // reduction + var cachedStore = {}; + var cloneProvider = function cloneProvider() { + return dataModel.detachedRoot(); + }; + data.forEach(function (row) { + var tuple = row; + measureArr.forEach(function (_) { + tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore); + }); + }); + if (existingDataModel) { + existingDataModel.__calculateFieldspace(); + newDataModel = existingDataModel; + } else { + newDataModel = new _export__WEBPACK_IMPORTED_MODULE_2__["default"](data, schema, { name: dbName }); + } + return newDataModel; +} + + + +/***/ }), + +/***/ "./src/operator/index.js": +/*!*******************************!*\ + !*** ./src/operator/index.js ***! + \*******************************/ +/*! 
exports provided: createBinnedFieldData, compose, bin, select, project, groupby, calculateVariable, sort, crossProduct, dataBuilder, difference, getCommonSchema, defReducer, fnList, groupBy, getFieldArr, getReducerObj, mergeSort, naturalJoinFilter, naturalJoin, leftOuterJoin, rightOuterJoin, fullOuterJoin, rowDiffsetIterator, union */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _bucket_creator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./bucket-creator */ "./src/operator/bucket-creator.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "createBinnedFieldData", function() { return _bucket_creator__WEBPACK_IMPORTED_MODULE_0__["createBinnedFieldData"]; }); + +/* harmony import */ var _compose__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./compose */ "./src/operator/compose.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "compose", function() { return _compose__WEBPACK_IMPORTED_MODULE_1__["compose"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "bin", function() { return _compose__WEBPACK_IMPORTED_MODULE_1__["bin"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "select", function() { return _compose__WEBPACK_IMPORTED_MODULE_1__["select"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "project", function() { return _compose__WEBPACK_IMPORTED_MODULE_1__["project"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "groupby", function() { return _compose__WEBPACK_IMPORTED_MODULE_1__["groupBy"]; }); + +/* harmony import */ var _pure_operators__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
./pure-operators */ "./src/operator/pure-operators.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "calculateVariable", function() { return _pure_operators__WEBPACK_IMPORTED_MODULE_2__["calculateVariable"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "sort", function() { return _pure_operators__WEBPACK_IMPORTED_MODULE_2__["sort"]; }); + +/* harmony import */ var _cross_product__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./cross-product */ "./src/operator/cross-product.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "crossProduct", function() { return _cross_product__WEBPACK_IMPORTED_MODULE_3__["crossProduct"]; }); + +/* harmony import */ var _data_builder__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./data-builder */ "./src/operator/data-builder.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "dataBuilder", function() { return _data_builder__WEBPACK_IMPORTED_MODULE_4__["dataBuilder"]; }); + +/* harmony import */ var _difference__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./difference */ "./src/operator/difference.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "difference", function() { return _difference__WEBPACK_IMPORTED_MODULE_5__["difference"]; }); + +/* harmony import */ var _get_common_schema__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./get-common-schema */ "./src/operator/get-common-schema.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getCommonSchema", function() { return _get_common_schema__WEBPACK_IMPORTED_MODULE_6__["getCommonSchema"]; }); + +/* harmony import */ var _group_by_function__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(/*! 
./group-by-function */ "./src/operator/group-by-function.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "defReducer", function() { return _group_by_function__WEBPACK_IMPORTED_MODULE_7__["defReducer"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "fnList", function() { return _group_by_function__WEBPACK_IMPORTED_MODULE_7__["fnList"]; }); + +/* harmony import */ var _group_by__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./group-by */ "./src/operator/group-by.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "groupBy", function() { return _group_by__WEBPACK_IMPORTED_MODULE_8__["groupBy"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getFieldArr", function() { return _group_by__WEBPACK_IMPORTED_MODULE_8__["getFieldArr"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getReducerObj", function() { return _group_by__WEBPACK_IMPORTED_MODULE_8__["getReducerObj"]; }); + +/* harmony import */ var _merge_sort__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! ./merge-sort */ "./src/operator/merge-sort.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "mergeSort", function() { return _merge_sort__WEBPACK_IMPORTED_MODULE_9__["mergeSort"]; }); + +/* harmony import */ var _natural_join_filter_function__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(/*! ./natural-join-filter-function */ "./src/operator/natural-join-filter-function.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "naturalJoinFilter", function() { return _natural_join_filter_function__WEBPACK_IMPORTED_MODULE_10__["naturalJoinFilter"]; }); + +/* harmony import */ var _natural_join__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(/*! 
./natural-join */ "./src/operator/natural-join.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "naturalJoin", function() { return _natural_join__WEBPACK_IMPORTED_MODULE_11__["naturalJoin"]; }); + +/* harmony import */ var _outer_join__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(/*! ./outer-join */ "./src/operator/outer-join.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "leftOuterJoin", function() { return _outer_join__WEBPACK_IMPORTED_MODULE_12__["leftOuterJoin"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "rightOuterJoin", function() { return _outer_join__WEBPACK_IMPORTED_MODULE_12__["rightOuterJoin"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "fullOuterJoin", function() { return _outer_join__WEBPACK_IMPORTED_MODULE_12__["fullOuterJoin"]; }); + +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "rowDiffsetIterator", function() { return _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_13__["rowDiffsetIterator"]; }); + +/* harmony import */ var _union__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(/*! ./union */ "./src/operator/union.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "union", function() { return _union__WEBPACK_IMPORTED_MODULE_14__["union"]; }); + + + + + + + + + + + + + + + + + +/***/ }), + +/***/ "./src/operator/merge-sort.js": +/*!************************************!*\ + !*** ./src/operator/merge-sort.js ***! + \************************************/ +/*! 
exports provided: mergeSort */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "mergeSort", function() { return mergeSort; }); +/** + * The default sort function. + * + * @param {*} a - The first value. + * @param {*} b - The second value. + * @return {number} Returns the comparison result e.g. 1 or 0 or -1. + */ +function defSortFn(a, b) { + var a1 = "" + a; + var b1 = "" + b; + if (a1 < b1) { + return -1; + } + if (a1 > b1) { + return 1; + } + return 0; +} + +/** + * The helper function for merge sort which creates the sorted array + * from the two halves of the input array. + * + * @param {Array} arr - The target array which needs to be merged. + * @param {number} lo - The starting index of the first array half. + * @param {number} mid - The ending index of the first array half. + * @param {number} hi - The ending index of the second array half. + * @param {Function} sortFn - The sort function. + */ +function merge(arr, lo, mid, hi, sortFn) { + var mainArr = arr; + var auxArr = []; + for (var i = lo; i <= hi; i += 1) { + auxArr[i] = mainArr[i]; + } + var a = lo; + var b = mid + 1; + + for (var _i = lo; _i <= hi; _i += 1) { + if (a > mid) { + mainArr[_i] = auxArr[b]; + b += 1; + } else if (b > hi) { + mainArr[_i] = auxArr[a]; + a += 1; + } else if (sortFn(auxArr[a], auxArr[b]) <= 0) { + mainArr[_i] = auxArr[a]; + a += 1; + } else { + mainArr[_i] = auxArr[b]; + b += 1; + } + } +} + +/** + * The helper function for merge sort which would be called + * recursively for sorting the array halves. + * + * @param {Array} arr - The target array which needs to be sorted. + * @param {number} lo - The starting index of the array half. + * @param {number} hi - The ending index of the array half. + * @param {Function} sortFn - The sort function. + * @return {Array} Returns the target array itself. 
+ */ +function sort(arr, lo, hi, sortFn) { + if (hi === lo) { + return arr; + } + + var mid = lo + Math.floor((hi - lo) / 2); + sort(arr, lo, mid, sortFn); + sort(arr, mid + 1, hi, sortFn); + merge(arr, lo, mid, hi, sortFn); + + return arr; +} + +/** + * The implementation of merge sort. + * It is used in DataModel for stable sorting as it is not sure + * what the sorting algorithm used by browsers is stable or not. + * + * @param {Array} arr - The target array which needs to be sorted. + * @param {Function} [sortFn=defSortFn] - The sort function. + * @return {Array} Returns the input array itself in sorted order. + */ +function mergeSort(arr) { + var sortFn = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : defSortFn; + + if (arr.length > 1) { + sort(arr, 0, arr.length - 1, sortFn); + } + return arr; +} + +/***/ }), + +/***/ "./src/operator/natural-join-filter-function.js": +/*!******************************************************!*\ + !*** ./src/operator/natural-join-filter-function.js ***! + \******************************************************/ +/*! exports provided: naturalJoinFilter */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "naturalJoinFilter", function() { return naturalJoinFilter; }); +/* harmony import */ var _get_common_schema__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./get-common-schema */ "./src/operator/get-common-schema.js"); + + +/** + * The filter function used in natural join. + * It generates a function that will have the logic to join two + * DataModel instances by the process of natural join. + * + * @param {DataModel} dm1 - The left DataModel instance. + * @param {DataModel} dm2 - The right DataModel instance. + * @return {Function} Returns a function that is used in cross-product operation. 
+ */ +function naturalJoinFilter(dm1, dm2) { + var dm1FieldStore = dm1.getFieldspace(); + var dm2FieldStore = dm2.getFieldspace(); + // const dm1FieldStoreName = dm1FieldStore.name; + // const dm2FieldStoreName = dm2FieldStore.name; + var commonSchemaArr = Object(_get_common_schema__WEBPACK_IMPORTED_MODULE_0__["getCommonSchema"])(dm1FieldStore, dm2FieldStore); + + return function (dm1Fields, dm2Fields) { + var retainTuple = true; + commonSchemaArr.forEach(function (fieldName) { + if (dm1Fields[fieldName].internalValue === dm2Fields[fieldName].internalValue && retainTuple) { + retainTuple = true; + } else { + retainTuple = false; + } + }); + return retainTuple; + }; +} + +/***/ }), + +/***/ "./src/operator/natural-join.js": +/*!**************************************!*\ + !*** ./src/operator/natural-join.js ***! + \**************************************/ +/*! exports provided: naturalJoin */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "naturalJoin", function() { return naturalJoin; }); +/* harmony import */ var _cross_product__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./cross-product */ "./src/operator/cross-product.js"); +/* harmony import */ var _natural_join_filter_function__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./natural-join-filter-function */ "./src/operator/natural-join-filter-function.js"); + + + +function naturalJoin(dataModel1, dataModel2) { + return Object(_cross_product__WEBPACK_IMPORTED_MODULE_0__["crossProduct"])(dataModel1, dataModel2, Object(_natural_join_filter_function__WEBPACK_IMPORTED_MODULE_1__["naturalJoinFilter"])(dataModel1, dataModel2), true); +} + +/***/ }), + +/***/ "./src/operator/outer-join.js": +/*!************************************!*\ + !*** ./src/operator/outer-join.js ***! + \************************************/ +/*! 
exports provided: leftOuterJoin, rightOuterJoin, fullOuterJoin */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "leftOuterJoin", function() { return leftOuterJoin; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "rightOuterJoin", function() { return rightOuterJoin; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "fullOuterJoin", function() { return fullOuterJoin; }); +/* harmony import */ var _cross_product__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./cross-product */ "./src/operator/cross-product.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../constants */ "./src/constants/index.js"); +/* harmony import */ var _union__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./union */ "./src/operator/union.js"); + + + + +function leftOuterJoin(dataModel1, dataModel2, filterFn) { + return Object(_cross_product__WEBPACK_IMPORTED_MODULE_0__["crossProduct"])(dataModel1, dataModel2, filterFn, false, _constants__WEBPACK_IMPORTED_MODULE_1__["JOINS"].LEFTOUTER); +} + +function rightOuterJoin(dataModel1, dataModel2, filterFn) { + return Object(_cross_product__WEBPACK_IMPORTED_MODULE_0__["crossProduct"])(dataModel2, dataModel1, filterFn, false, _constants__WEBPACK_IMPORTED_MODULE_1__["JOINS"].RIGHTOUTER); +} + +function fullOuterJoin(dataModel1, dataModel2, filterFn) { + return Object(_union__WEBPACK_IMPORTED_MODULE_2__["union"])(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn)); +} + +/***/ }), + +/***/ "./src/operator/pure-operators.js": +/*!****************************************!*\ + !*** ./src/operator/pure-operators.js ***! + \****************************************/ +/*! 
exports provided: calculateVariable, sort */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "calculateVariable", function() { return calculateVariable; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sort", function() { return sort; }); +/** + * Wrapper on calculateVariable() method of DataModel to behave + * the pure-function functionality. + * + * @param {Array} args - The argument list. + * @return {any} Returns the returned value of calling function. + */ +var calculateVariable = function calculateVariable() { + for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + return function (dm) { + return dm.calculateVariable.apply(dm, args); + }; +}; + +/** + * Wrapper on sort() method of DataModel to behave + * the pure-function functionality. + * + * @param {Array} args - The argument list. + * @return {any} Returns the returned value of calling function. + */ +var sort = function sort() { + for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { + args[_key2] = arguments[_key2]; + } + + return function (dm) { + return dm.sort.apply(dm, args); + }; +}; + +/***/ }), + +/***/ "./src/operator/row-diffset-iterator.js": +/*!**********************************************!*\ + !*** ./src/operator/row-diffset-iterator.js ***! + \**********************************************/ +/*! exports provided: rowDiffsetIterator */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "rowDiffsetIterator", function() { return rowDiffsetIterator; }); +/** + * Iterates through the diffSet array and call the callback with the current + * index. 
+ * + * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'. + * @param {Function} callback - The callback function to be called with every index. + */ +function rowDiffsetIterator(rowDiffset, callback) { + if (rowDiffset.length > 0) { + var rowDiffArr = rowDiffset.split(','); + rowDiffArr.forEach(function (diffStr) { + var diffStsArr = diffStr.split('-'); + var start = +diffStsArr[0]; + var end = +(diffStsArr[1] || diffStsArr[0]); + if (end >= start) { + for (var i = start; i <= end; i += 1) { + callback(i); + } + } + }); + } +} + +/***/ }), + +/***/ "./src/operator/sort.js": +/*!******************************!*\ + !*** ./src/operator/sort.js ***! + \******************************/ +/*! exports provided: sortData */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sortData", function() { return sortData; }); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../enums */ "./src/enums/index.js"); +/* harmony import */ var _merge_sort__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./merge-sort */ "./src/operator/merge-sort.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../helper */ "./src/helper.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
../utils */ "./src/utils/index.js"); +var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); + +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + + + + +/** + * Generates the sorting functions to sort the data of a DataModel instance + * according to the input data type. + * + * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc. + * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'. + * @return {Function} Returns the the sorting function. + */ +function getSortFn(dataType, sortType) { + var retFunc = void 0; + + switch (dataType) { + case _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS: + case _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].TEMPORAL: + if (sortType === 'asc') { + retFunc = function retFunc(a, b) { + return a - b; + }; + } else { + retFunc = function retFunc(a, b) { + return b - a; + }; + } + break; + default: + if (sortType === 'asc') { + retFunc = function retFunc(a, b) { + a = '' + a; + b = '' + b; + if (a === b) { + return 0; + } + return a > b ? 1 : -1; + }; + } else { + retFunc = function retFunc(a, b) { + a = '' + a; + b = '' + b; + if (a === b) { + return 0; + } + return a > b ? 
-1 : 1; + }; + } + } + + return retFunc; +} + +/** + * Resolves the actual sorting function based on sorting string value. + * + * @param {Object} fDetails - The target field info. + * @param {string} strSortOrder - The sort order value. + * @return {Function} Returns the sorting function. + */ +function resolveStrSortOrder(fDetails, strSortOrder) { + var sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc'; + return getSortFn(fDetails.type, sortOrder); +} + +/** + * Groups the data according to the specified target field. + * + * @param {Array} data - The input data array. + * @param {number} fieldIndex - The target field index within schema array. + * @return {Array} Returns an array containing the grouped data. + */ +function groupData(data, fieldIndex) { + var hashMap = new Map(); + var groupedData = []; + + data.forEach(function (datum) { + var fieldVal = datum[fieldIndex]; + if (hashMap.has(fieldVal)) { + groupedData[hashMap.get(fieldVal)][1].push(datum); + } else { + groupedData.push([fieldVal, [datum]]); + hashMap.set(fieldVal, groupedData.length - 1); + } + }); + + return groupedData; +} + +/** + * Creates the argument value used for sorting function when sort is done + * with another fields. + * + * @param {Array} groupedDatum - The grouped datum for a single dimension field value. + * @param {Array} targetFields - An array of the sorting fields. + * @param {Array} targetFieldDetails - An array of the sorting field details in schema. + * @return {Object} Returns an object containing the value of sorting fields and the target field name. + */ +function createSortingFnArg(groupedDatum, targetFields, targetFieldDetails) { + var arg = { + label: groupedDatum[0] + }; + + targetFields.reduce(function (acc, next, idx) { + acc[next] = groupedDatum[1].map(function (datum) { + return datum[targetFieldDetails[idx].index]; + }); + return acc; + }, arg); + + return arg; +} + +/** + * Sorts the data by applying the standard sorting mechanism. 
+ * + * @param {Array} data - The input data array. + * @param {Array} schema - The data schema. + * @param {Array} sortingDetails - An array containing the sorting configs. + */ +function applyStandardSort(data, schema, sortingDetails) { + var fieldName = void 0; + var sortMeta = void 0; + var fDetails = void 0; + var i = sortingDetails.length - 1; + + for (; i >= 0; i--) { + fieldName = sortingDetails[i][0]; + sortMeta = sortingDetails[i][1]; + fDetails = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["fieldInSchema"])(schema, fieldName); + + if (!fDetails) { + // eslint-disable-next-line no-continue + continue; + } + + if (Object(_utils__WEBPACK_IMPORTED_MODULE_3__["isCallable"])(sortMeta)) { + // eslint-disable-next-line no-loop-func + Object(_merge_sort__WEBPACK_IMPORTED_MODULE_1__["mergeSort"])(data, function (a, b) { + return sortMeta(a[fDetails.index], b[fDetails.index]); + }); + } else if (Object(_utils__WEBPACK_IMPORTED_MODULE_3__["isArray"])(sortMeta)) { + (function () { + var groupedData = groupData(data, fDetails.index); + var sortingFn = sortMeta[sortMeta.length - 1]; + var targetFields = sortMeta.slice(0, sortMeta.length - 1); + var targetFieldDetails = targetFields.map(function (f) { + return Object(_helper__WEBPACK_IMPORTED_MODULE_2__["fieldInSchema"])(schema, f); + }); + + groupedData.forEach(function (groupedDatum) { + groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails)); + }); + + Object(_merge_sort__WEBPACK_IMPORTED_MODULE_1__["mergeSort"])(groupedData, function (a, b) { + var m = a[2]; + var n = b[2]; + return sortingFn(m, n); + }); + + // Empty the array + data.length = 0; + groupedData.forEach(function (datum) { + data.push.apply(data, _toConsumableArray(datum[1])); + }); + })(); + } else { + (function () { + var sortFn = resolveStrSortOrder(fDetails, sortMeta); + // eslint-disable-next-line no-loop-func + Object(_merge_sort__WEBPACK_IMPORTED_MODULE_1__["mergeSort"])(data, function (a, b) { + return 
sortFn(a[fDetails.index], b[fDetails.index]); + }); + })(); + } + } +} + +/** + * Creates a map based on grouping. + * + * @param {Array} depColumns - The dependency columns' info. + * @param {Array} data - The input data. + * @param {Array} schema - The data schema. + * @param {Array} sortingDetails - The sorting details for standard sorting. + * @return {Map} Returns a map. + */ +var makeGroupMapAndSort = function makeGroupMapAndSort(depColumns, data, schema, sortingDetails) { + if (depColumns.length === 0) { + return data; + } + + var targetCol = depColumns[0]; + var map = new Map(); + + data.reduce(function (acc, currRow) { + var fVal = currRow[targetCol.index]; + if (acc.has(fVal)) { + acc.get(fVal).push(currRow); + } else { + acc.set(fVal, [currRow]); + } + return acc; + }, map); + + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = map[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var _ref = _step.value; + + var _ref2 = _slicedToArray(_ref, 2); + + var key = _ref2[0]; + var val = _ref2[1]; + + var nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails); + map.set(key, nMap); + if (Array.isArray(nMap)) { + applyStandardSort(nMap, schema, sortingDetails); + } + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator.return) { + _iterator.return(); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + return map; +}; + +/** + * Sorts the data by retaining the position/order of a particular field. + * + * @param {Array} data - The input data array. + * @param {Array} schema - The data schema. + * @param {Array} sortingDetails - An array containing the sorting configs. + * @param {Array} depColumns - The dependency column list. 
+ * @return {Array} Returns the sorted data. + */ +function applyGroupSort(data, schema, sortingDetails, depColumns) { + sortingDetails = sortingDetails.filter(function (detail) { + if (detail[1] === null) { + depColumns.push(detail[0]); + return false; + } + return true; + }); + if (sortingDetails.length === 0) { + return data; + } + + depColumns = depColumns.map(function (c) { + return Object(_helper__WEBPACK_IMPORTED_MODULE_2__["fieldInSchema"])(schema, c); + }); + + var sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails); + return data.map(function (row) { + var i = 0; + var nextMap = sortedGroupMap; + + while (!Array.isArray(nextMap)) { + nextMap = nextMap.get(row[depColumns[i++].index]); + } + + return nextMap.shift(); + }); +} + +/** + * Sorts the data. + * + * @param {Object} dataObj - An object containing the data and schema. + * @param {Array} sortingDetails - An array containing the sorting configs. + */ +function sortData(dataObj, sortingDetails) { + var schema = dataObj.schema, + data = dataObj.data; + + + sortingDetails = sortingDetails.filter(function (sDetial) { + return !!Object(_helper__WEBPACK_IMPORTED_MODULE_2__["fieldInSchema"])(schema, sDetial[0]); + }); + if (sortingDetails.length === 0) { + return; + } + + var groupSortingIdx = sortingDetails.findIndex(function (sDetial) { + return sDetial[1] === null; + }); + groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length; + + var standardSortingDetails = sortingDetails.slice(0, groupSortingIdx); + var groupSortingDetails = sortingDetails.slice(groupSortingIdx); + + applyStandardSort(data, schema, standardSortingDetails); + data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(function (detail) { + return detail[0]; + })); + + dataObj.uids = data.map(function (row) { + return row.pop(); + }); + dataObj.data = data; +} + +/***/ }), + +/***/ "./src/operator/union.js": +/*!*******************************!*\ + !*** ./src/operator/union.js ***! + \*******************************/ +/*! exports provided: union */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "union", function() { return union; }); +/* harmony import */ var _export__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../export */ "./src/export.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../utils */ "./src/utils/index.js"); +/* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _utils_helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../utils/helper */ "./src/utils/helper.js"); + + + + +/** + * Performs the union operation between two dm instances. + * + * @param {dm} dm1 - The first dm instance. + * @param {dm} dm2 - The second dm instance. + * @return {dm} Returns the newly created dm after union operation. 
+ */ +function union(dm1, dm2) { + var hashTable = {}; + var schema = []; + var schemaNameArr = []; + var data = []; + var dm1FieldStore = dm1.getFieldspace(); + var dm2FieldStore = dm2.getFieldspace(); + var dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj(); + var dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj(); + var name = dm1FieldStore.name + ' union ' + dm2FieldStore.name; + + // For union the columns should match otherwise return a clone of the dm1 + if (!Object(_utils_helper__WEBPACK_IMPORTED_MODULE_3__["isArrEqual"])(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) { + return null; + } + + // Prepare the schema + dm1._colIdentifier.split(',').forEach(function (fieldName) { + var field = dm1FieldStoreFieldObj[fieldName]; + schema.push(Object(_utils__WEBPACK_IMPORTED_MODULE_1__["extend2"])({}, field.schema())); + schemaNameArr.push(field.schema().name); + }); + + /** + * The helper function to create the data. + * + * @param {dm} dm - The dm instance for which the data is inserted. + * @param {Object} fieldsObj - The fieldStore object format. + */ + function prepareDataHelper(dm, fieldsObj) { + Object(_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_2__["rowDiffsetIterator"])(dm._rowDiffset, function (i) { + var tuple = {}; + var hashData = ''; + schemaNameArr.forEach(function (schemaName) { + var value = fieldsObj[schemaName].partialField.data[i]; + hashData += '-' + value; + tuple[schemaName] = value; + }); + if (!hashTable[hashData]) { + data.push(tuple); + hashTable[hashData] = true; + } + }); + } + + // Prepare the data + prepareDataHelper(dm1, dm1FieldStoreFieldObj); + prepareDataHelper(dm2, dm2FieldStoreFieldObj); + + return new _export__WEBPACK_IMPORTED_MODULE_0__["default"](data, schema, { name: name }); +} + +/***/ }), + +/***/ "./src/relation.js": +/*!*************************!*\ + !*** ./src/relation.js ***! + \*************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./enums */ "./src/enums/index.js"); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./utils */ "./src/utils/index.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./helper */ "./src/helper.js"); +/* harmony import */ var _operator__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./operator */ "./src/operator/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + + + + +/** + * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*, + * *difference* etc. + * + * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept. + * + * @class + * @public + * @module Relation + * @namespace DataModel + */ + +var Relation = function () { + + /** + * Creates a new Relation instance by providing underlying data and schema. 
+ * + * @private + * + * @param {Object | string | Relation} data - The input tabular data in dsv or json format or + * an existing Relation instance object. + * @param {Array} schema - An array of data schema. + * @param {Object} [options] - The optional options. + */ + function Relation() { + _classCallCheck(this, Relation); + + var source = void 0; + + this._parent = null; + this._derivation = []; + this._ancestorDerivation = []; + this._children = []; + + for (var _len = arguments.length, params = Array(_len), _key = 0; _key < _len; _key++) { + params[_key] = arguments[_key]; + } + + if (params.length === 1 && (source = params[0]) instanceof Relation) { + // parent datamodel was passed as part of source + this._colIdentifier = source._colIdentifier; + this._rowDiffset = source._rowDiffset; + this._dataFormat = source._dataFormat; + this._parent = source; + this._partialFieldspace = this._parent._partialFieldspace; + this._fieldStoreName = Object(_utils__WEBPACK_IMPORTED_MODULE_1__["getUniqueId"])(); + this.__calculateFieldspace().calculateFieldsConfig(); + } else { + _helper__WEBPACK_IMPORTED_MODULE_2__["updateData"].apply(undefined, [this].concat(params)); + this._fieldStoreName = this._partialFieldspace.name; + this.__calculateFieldspace().calculateFieldsConfig(); + this._propagationNameSpace = { + mutableActions: {}, + immutableActions: {} + }; + } + } + + /** + * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array. + * + * @public + * + * @return {Array.} Array of fields schema. 
+ * ``` + * [ + * { name: 'Name', type: 'dimension' }, + * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` }, + * { name: 'Cylinder', type: 'dimension' }, + * { name: 'Displacement', type: 'measure', defAggFn: 'max' }, + * { name: 'HorsePower', type: 'measure', defAggFn: 'max' }, + * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', }, + * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' }, + * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' }, + * { name: 'Origin' } + * ] + * ``` + */ + + + _createClass(Relation, [{ + key: 'getSchema', + value: function getSchema() { + return this.getFieldspace().fields.map(function (d) { + return d.schema(); + }); + } + + /** + * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel} + * initialization, then it returns a auto-generated name. + * + * @public + * + * @return {string} Name of the DataModel instance. + */ + + }, { + key: 'getName', + value: function getName() { + return this._fieldStoreName; + } + }, { + key: 'getFieldspace', + value: function getFieldspace() { + return this._fieldspace; + } + }, { + key: '__calculateFieldspace', + value: function __calculateFieldspace() { + this._fieldspace = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["updateFields"])([this._rowDiffset, this._colIdentifier], this.getPartialFieldspace(), this._fieldStoreName); + return this; + } + }, { + key: 'getPartialFieldspace', + value: function getPartialFieldspace() { + return this._partialFieldspace; + } + + /** + * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a + * new {@link DataModel} instance containing the results. This operation is also called theta join. + * + * Cross product takes two set and create one set where each value of one set is paired with each value of another + * set. 
+ * + * This method takes an optional predicate which filters the generated result rows. If the predicate returns true + * the combined row is included in the resulatant table. + * + * @example + * let originDM = dm.project(['Origin','Origin_Formal_Name']); + * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin']) + * + * console.log(carsDM.join(originDM))); + * + * console.log(carsDM.join(originDM, + * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin)); + * + * @text + * This is chained version of `join` operator. `join` can also be used as + * {@link link_to_join_op | functional operator}. + * + * @public + * + * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel. + * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct. + * + * @return {DataModel} New DataModel instance created after joining. + */ + + }, { + key: 'join', + value: function join(joinWith, filterFn) { + return Object(_operator__WEBPACK_IMPORTED_MODULE_3__["crossProduct"])(this, joinWith, filterFn); + } + + /** + * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed + * internally by resolving common fields are from both table and the rows with common value are included. + * + * @example + * let originDM = dm.project(['Origin','Origin_Formal_Name']); + * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin']) + * + * console.log(carsDM.naturalJoin(originDM)); + * + * @text + * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as + * {@link link_to_join_op | functional operator}. + * + * @public + * + * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is + * called will be joined. + * @return {DataModel} New DataModel instance created after joining. 
+ */ + + }, { + key: 'naturalJoin', + value: function naturalJoin(joinWith) { + return Object(_operator__WEBPACK_IMPORTED_MODULE_3__["crossProduct"])(this, joinWith, Object(_operator__WEBPACK_IMPORTED_MODULE_3__["naturalJoinFilter"])(this, joinWith), true); + } + + /** + * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel + * instances, provided that both of the {@link DataModel} instances should have same column names. + * + * @example + * console.log(EuropeanMakerDM.union(USAMakerDM)); + * + * @text + * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as + * {@link link_to_join_op | functional operator}. + * + * @public + * + * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which + * the method is called + * + * @return {DataModel} New DataModel instance with the result of the operation + */ + + }, { + key: 'union', + value: function union(unionWith) { + return Object(_operator__WEBPACK_IMPORTED_MODULE_3__["union"])(this, unionWith); + } + + /** + * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which + * it was called but not on the one passed as argument. + * + * @example + * console.log(highPowerDM.difference(highExpensiveDM)); + * + * @text + * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as + * {@link link_to_join_op | functional operator}. 
+ * + * @public + * + * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance + * on which the method is called + * @return {DataModel} New DataModel instance with the result of the operation + */ + + }, { + key: 'difference', + value: function difference(differenceWith) { + return Object(_operator__WEBPACK_IMPORTED_MODULE_3__["difference"])(this, differenceWith); + } + + /** + * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode + * which control which all rows should be included in the resultant DataModel instance. + * + * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection + * function is called for each row of DataModel instance with the current row passed as argument. + * + * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry + * of rejection set. + * + * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the + * resultant datamodel. + * + * @warning + * Selection and rejection set is only a logical idea for concept explanation purpose. + * + * @example + * // with selection mode NORMAL: + * const normDt = dt.select(fields => fields.Origin.value === "USA") + * console.log(normDt)); + * + * // with selection mode INVERSE: + * const inverDt = dt.select(fields => fields.Origin.value === "USA", { mode: DataModel.FilteringMode.INVERSE }) + * console.log(inverDt); + * + * // with selection mode ALL: + * const dtArr = dt.select(fields => fields.Origin.value === "USA", { mode: DataModel.FilteringMode.ALL }) + * // print the selected parts + * console.log(dtArr[0]); + * // print the inverted parts + * console.log(dtArr[1]); + * + * @text + * This is chained version of `select` operator. `select` can also be used as + * {@link link_to_join_op | functional operator}. 
+ * + * @public + * + * @param {Function} selectFn - The predicate function which is called for each row with the current row. + * ``` + * function (row, i, cloneProvider, store) { ... } + * ``` + * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant + * DataModel instance. + * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection. + * @return {DataModel} Returns the new DataModel instance(s) after operation. + */ + + }, { + key: 'select', + value: function select(selectFn, config) { + var defConfig = { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, + saveChild: true + }; + config = Object.assign({}, defConfig, config); + config.mode = config.mode || defConfig.mode; + + var cloneConfig = { saveChild: config.saveChild }; + return Object(_helper__WEBPACK_IMPORTED_MODULE_2__["cloneWithSelect"])(this, selectFn, config, cloneConfig); + } + + /** + * Retrieves a boolean value if the current {@link DataModel} instance has data. + * + * @example + * const schema = [ + * { name: 'CarName', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * const data = []; + * + * const dt = new DataModel(data, schema); + * console.log(dt.isEmpty()); + * + * @public + * + * @return {Boolean} True if the datamodel has no data, otherwise false. + */ + + }, { + key: 'isEmpty', + value: function isEmpty() { + return !this._rowDiffset.length || !this._colIdentifier.length; + } + + /** + * Creates a clone from the current DataModel instance with child parent relationship. + * + * @private + * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance. + * @return {DataModel} - Returns the newly cloned DataModel instance. + */ + + }, { + key: 'clone', + value: function clone() { + var saveChild = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : true; + + var clonedDm = new this.constructor(this); + if (saveChild) { + clonedDm.setParent(this); + } else { + clonedDm.setParent(null); + } + return clonedDm; + } + + /** + * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those + * or exclude those based on {@link FilteringMode} on the resultant variable. + * + * Projection expects array of fields name based on which it creates the selection and rejection set. All the field + * whose name is present in array goes in selection set and rest of the fields goes in rejection set. + * + * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the + * resulatant datamodel. + * + * @warning + * Selection and rejection set is only a logical idea for concept explanation purpose. + * + * @example + * const dm = new DataModel(data, schema); + * + * // with projection mode NORMAL: + * const normDt = dt.project(["Name", "HorsePower"]); + * console.log(normDt.getData()); + * + * // with projection mode INVERSE: + * const inverDt = dt.project(["Name", "HorsePower"], { mode: DataModel.FilteringMode.INVERSE }) + * console.log(inverDt.getData()); + * + * // with selection mode ALL: + * const dtArr = dt.project(["Name", "HorsePower"], { mode: DataModel.FilteringMode.ALL }) + * // print the normal parts + * console.log(dtArr[0].getData()); + * // print the inverted parts + * console.log(dtArr[1].getData()); + * + * @text + * This is chained version of `select` operator. `select` can also be used as + * {@link link_to_join_op | functional operator}. + * + * @public + * + * @param {Array.} projField - An array of column names in string or regular expression. + * @param {Object} [config] - An optional config to control the creation of new DataModel + * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection + * + * @return {DataModel} Returns the new DataModel instance after operation. 
+ */ + + }, { + key: 'project', + value: function project(projField, config) { + var defConfig = { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, + saveChild: true + }; + config = Object.assign({}, defConfig, config); + var fieldConfig = this.getFieldsConfig(); + var allFields = Object.keys(fieldConfig); + var _config = config, + mode = _config.mode; + + var normalizedProjField = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["getNormalizedProFields"])(projField, allFields, fieldConfig); + + var dataModel = void 0; + + if (mode === _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].ALL) { + var projectionClone = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["cloneWithProject"])(this, normalizedProjField, { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].NORMAL, + saveChild: config.saveChild + }, allFields); + var rejectionClone = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["cloneWithProject"])(this, normalizedProjField, { + mode: _enums__WEBPACK_IMPORTED_MODULE_0__["FilteringMode"].INVERSE, + saveChild: config.saveChild + }, allFields); + dataModel = [projectionClone, rejectionClone]; + } else { + var _projectionClone = Object(_helper__WEBPACK_IMPORTED_MODULE_2__["cloneWithProject"])(this, normalizedProjField, config, allFields); + dataModel = _projectionClone; + } + + return dataModel; + } + }, { + key: 'getFieldsConfig', + value: function getFieldsConfig() { + return this._fieldConfig; + } + }, { + key: 'calculateFieldsConfig', + value: function calculateFieldsConfig() { + this._fieldConfig = this._fieldspace.fields.reduce(function (acc, fieldObj, i) { + acc[fieldObj.name()] = { + index: i, + def: fieldObj.schema() + }; + return acc; + }, {}); + return this; + } + + /** + * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in + * the DAG. 
+ * + * @public + */ + + }, { + key: 'dispose', + value: function dispose() { + this._parent && this._parent.removeChild(this); + this._parent = null; + this._children.forEach(function (child) { + child._parent = null; + }); + this._children = []; + } + + /** + * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance. + * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * + * const dt2 = dt.select(fields => fields.Origin.value === "USA") + * dt.removeChild(dt2); + * + * @private + * + * @param {DataModel} child - Delegates the parent to remove this child. + */ + + }, { + key: 'removeChild', + value: function removeChild(child) { + var idx = this._children.findIndex(function (sibling) { + return sibling === child; + }); + idx !== -1 ? this._children.splice(idx, 1) : true; + } + + /** + * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance. + * + * @param {DataModel} parent - The datamodel instance which will act as parent. + */ + + }, { + key: 'setParent', + value: function setParent(parent) { + this._parent && this._parent.removeChild(this); + this._parent = parent; + parent && parent._children.push(this); + } + + /** + * Returns the parent {@link DataModel} instance. 
+ * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * + * const dt2 = dt.select(fields => fields.Origin.value === "USA"); + * const parentDm = dt2.getParent(); + * + * @return {DataModel} Returns the parent DataModel instance. + */ + + }, { + key: 'getParent', + value: function getParent() { + return this._parent; + } + + /** + * Returns the immediate child {@link DataModel} instances. + * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * + * const childDm1 = dt.select(fields => fields.Origin.value === "USA"); + * const childDm2 = dt.select(fields => fields.Origin.value === "Japan"); + * const childDm3 = dt.groupBy(["Origin"]); + * + * @return {DataModel[]} Returns the immediate child DataModel instances. + */ + + }, { + key: 'getChildren', + value: function getChildren() { + return this._children; + } + + /** + * Returns the in-between operation meta data while creating the current {@link DataModel} instance. 
+ * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * const dt2 = dt.select(fields => fields.Origin.value === "USA"); + * const dt3 = dt2.groupBy(["Origin"]); + * const derivations = dt3.getDerivations(); + * + * @return {Any[]} Returns the derivation meta data. + */ + + }, { + key: 'getDerivations', + value: function getDerivations() { + return this._derivation; + } + + /** + * Returns the in-between operation meta data happened from root {@link DataModel} to current instance. + * + * @example + * const schema = [ + * { name: 'Name', type: 'dimension' }, + * { name: 'HorsePower', type: 'measure' }, + * { name: "Origin", type: 'dimension' } + * ]; + * + * const data = [ + * { Name: "chevrolet chevelle malibu", Horsepower: 130, Origin: "USA" }, + * { Name: "citroen ds-21 pallas", Horsepower: 115, Origin: "Europe" }, + * { Name: "datsun pl510", Horsepower: 88, Origin: "Japan" }, + * { Name: "amc rebel sst", Horsepower: 150, Origin: "USA"}, + * ] + * + * const dt = new DataModel(data, schema); + * const dt2 = dt.select(fields => fields.Origin.value === "USA"); + * const dt3 = dt2.groupBy(["Origin"]); + * const ancDerivations = dt3.getAncestorDerivations(); + * + * @return {Any[]} Returns the previous derivation meta data. 
+ */ + + }, { + key: 'getAncestorDerivations', + value: function getAncestorDerivations() { + return this._ancestorDerivation; + } + }]); + + return Relation; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (Relation); + +/***/ }), + +/***/ "./src/stats/index.js": +/*!****************************!*\ + !*** ./src/stats/index.js ***! + \****************************/ +/*! exports provided: sum, avg, min, max, first, last, count, sd */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sum", function() { return sum; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "avg", function() { return avg; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "min", function() { return min; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "max", function() { return max; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "first", function() { return first; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "last", function() { return last; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "count", function() { return count; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "sd", function() { return sd; }); +/* harmony import */ var _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../operator/group-by-function */ "./src/operator/group-by-function.js"); + + +var sum = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].sum, + avg = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].avg, + min = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].min, + max = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].max, + first = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].first, + last = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].last, + count = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].count, + sd = _operator_group_by_function__WEBPACK_IMPORTED_MODULE_0__["fnList"].std; + + +/***/ }), + +/***/ "./src/utils/column-major.js": +/*!***********************************!*\ + !*** ./src/utils/column-major.js ***! + \***********************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * The utility function to calculate major column. + * + * @param {Object} store - The store object. + * @return {Function} Returns the push function. + */ +/* harmony default export */ __webpack_exports__["default"] = (function (store) { + var i = 0; + return function () { + for (var _len = arguments.length, fields = Array(_len), _key = 0; _key < _len; _key++) { + fields[_key] = arguments[_key]; + } + + fields.forEach(function (val, fieldIndex) { + if (!(store[fieldIndex] instanceof Array)) { + store[fieldIndex] = Array.from({ length: i }); + } + store[fieldIndex].push(val); + }); + i++; + }; +}); + +/***/ }), + +/***/ "./src/utils/date-time-formatter.js": +/*!******************************************!*\ + !*** ./src/utils/date-time-formatter.js ***! + \******************************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return DateTimeFormatter; }); +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + +/** + * Creates a JS native date object from input + * + * @param {string | number | Date} date Input using which date object to be created + * @return {Date} : JS native date object + */ +function convertToNativeDate(date) { + if (date instanceof Date) { + return date; + } + + return new Date(date); +} +/** + * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned + * between 0 - 99 + * + * @param {number} n Input to be padded + * @return {string} Padded number + */ +function pad(n) { + return n < 10 ? '0' + n : n; +} +/* + * DateFormatter utility to convert any date format to any other date format + * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined + * by user in terms of token. It creates JS native date object from the user specified format. + * That native date can also be displayed + * in any specified format. + * This utility class only takes care of format conversion only + */ + +/* + * Escapes all the special character that are used in regular expression. + * Like + * RegExp.escape('sgfd-$') // Output: sgfd\-\$ + * + * @param text {String} : text which is to be escaped + */ +RegExp.escape = function (text) { + return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +}; + +/** + * DateTimeFormatter class to convert any user format of date time stamp to any other format + * of date time stamp. + * + * @param {string} format Format of the date given. 
For the above date, + * 'year: %Y, month: %b, day: %d'. + * @class + */ +/* istanbul ignore next */function DateTimeFormatter(format) { + this.format = format; + this.dtParams = undefined; + this.nativeDate = undefined; +} + +// The identifier of the tokens +DateTimeFormatter.TOKEN_PREFIX = '%'; + +// JS native Date constructor takes the date params (year, month, etc) in a certail sequence. +// This defines the sequence of the date parameters in the constructor. +DateTimeFormatter.DATETIME_PARAM_SEQUENCE = { + YEAR: 0, + MONTH: 1, + DAY: 2, + HOUR: 3, + MINUTE: 4, + SECOND: 5, + MILLISECOND: 6 +}; + +/* + * This is a default number parsing utility. It tries to parse a number in integer, if parsing is unsuccessful, it + * gives back a default value. + * + * @param: defVal {Number} : Default no if the parsing to integer is not successful + * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed. + */ +DateTimeFormatter.defaultNumberParser = function (defVal) { + return function (val) { + var parsedVal = void 0; + if (isFinite(parsedVal = parseInt(val, 10))) { + return parsedVal; + } + + return defVal; + }; +}; + +/* + * This is a default number range utility. It tries to find an element in the range. If not found it returns a + * default no as an index. 
+ * + * @param: range {Array} : The list which is to be serached + * @param: defVal {Number} : Default no if the serach and find does not return anything + * @return {Function} : An closure function which is to be called by passing an the value which needs to be found + */ +DateTimeFormatter.defaultRangeParser = function (range, defVal) { + return function (val) { + var i = void 0; + var l = void 0; + + if (!val) { + return defVal; + } + + var nVal = val.toLowerCase(); + + for (i = 0, l = range.length; i < l; i++) { + if (range[i].toLowerCase() === nVal) { + return i; + } + } + + if (i === undefined) { + return defVal; + } + return null; + }; +}; + +/* + * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from + * the user specifed date string. This also formats the value for display purpose from native JS date. + * The definition of each token contains the following named properties + * { + * %token_name% : { + * name: name of the token, this is used in reverse lookup, + * extract: a function that returns the regular expression to extract that piece of information. All the + * regex should be gouped by using () + * parser: a function which receives value extracted by the above regex and parse it to get the date params + * formatter: a formatter function that takes milliseconds or JS Date object and format the param + * represented by the token only. + * } + * } + * + * @return {Object} : Definition of the all the supported tokens. 
+ */ +DateTimeFormatter.getTokenDefinitions = function () { + var daysDef = { + short: ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'], + long: ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] + }; + var monthsDef = { + short: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'], + long: ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] + }; + + var definitions = { + H: { + // 24 hours format + name: 'H', + index: 3, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + + return d.getHours().toString(); + } + }, + l: { + // 12 hours format + name: 'l', + index: 3, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var hours = d.getHours() % 12; + + return (hours === 0 ? 12 : hours).toString(); + } + }, + p: { + // AM or PM + name: 'p', + index: 3, + extract: function extract() { + return '(AM|PM)'; + }, + + parser: function parser(val) { + if (val) { + return val.toLowerCase(); + } + return null; + }, + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var hours = d.getHours(); + + return hours < 12 ? 'AM' : 'PM'; + } + }, + P: { + // am or pm + name: 'P', + index: 3, + extract: function extract() { + return '(am|pm)'; + }, + + parser: function parser(val) { + if (val) { + return val.toLowerCase(); + } + return null; + }, + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var hours = d.getHours(); + + return hours < 12 ? 
'am' : 'pm'; + } + }, + M: { + // Two digit minutes 00 - 59 + name: 'M', + index: 4, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var mins = d.getMinutes(); + + return pad(mins); + } + }, + S: { + // Two digit seconds 00 - 59 + name: 'S', + index: 5, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var seconds = d.getSeconds(); + + return pad(seconds); + } + }, + K: { + // Milliseconds + name: 'K', + index: 6, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var ms = d.getMilliseconds(); + + return ms.toString(); + } + }, + a: { + // Short name of day, like Mon + name: 'a', + index: 2, + extract: function extract() { + return '(' + daysDef.short.join('|') + ')'; + }, + + parser: DateTimeFormatter.defaultRangeParser(daysDef.short), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var day = d.getDay(); + + return daysDef.short[day].toString(); + } + }, + A: { + // Long name of day, like Monday + name: 'A', + index: 2, + extract: function extract() { + return '(' + daysDef.long.join('|') + ')'; + }, + + parser: DateTimeFormatter.defaultRangeParser(daysDef.long), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var day = d.getDay(); + + return daysDef.long[day].toString(); + } + }, + e: { + // 8 of March, 11 of November + name: 'e', + index: 2, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var day = d.getDate(); + + return day.toString(); + } + }, + d: { + // 08 of 
March, 11 of November + name: 'd', + index: 2, + extract: function extract() { + return '(\\d+)'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var day = d.getDate(); + + return pad(day); + } + }, + b: { + // Short month, like Jan + name: 'b', + index: 1, + extract: function extract() { + return '(' + monthsDef.short.join('|') + ')'; + }, + + parser: DateTimeFormatter.defaultRangeParser(monthsDef.short), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var month = d.getMonth(); + + return monthsDef.short[month].toString(); + } + }, + B: { + // Long month, like January + name: 'B', + index: 1, + extract: function extract() { + return '(' + monthsDef.long.join('|') + ')'; + }, + + parser: DateTimeFormatter.defaultRangeParser(monthsDef.long), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var month = d.getMonth(); + + return monthsDef.long[month].toString(); + } + }, + m: { + // Two digit month of year like 01 for January + name: 'm', + index: 1, + extract: function extract() { + return '(\\d+)'; + }, + parser: function parser(val) { + return DateTimeFormatter.defaultNumberParser()(val) - 1; + }, + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var month = d.getMonth(); + + return pad(month + 1); + } + }, + y: { + // Short year like 90 for 1990 + name: 'y', + index: 0, + extract: function extract() { + return '(\\d{2})'; + }, + parser: function parser(val) { + var result = void 0; + if (val) { + var l = val.length; + val = val.substring(l - 2, l); + } + var parsedVal = DateTimeFormatter.defaultNumberParser()(val); + var presentDate = new Date(); + var presentYear = Math.trunc(presentDate.getFullYear() / 100); + + result = '' + presentYear + parsedVal; + + if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) { + result = '' + (presentYear - 1) + parsedVal; + } + return 
convertToNativeDate(result).getFullYear(); + }, + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var year = d.getFullYear().toString(); + var l = void 0; + + if (year) { + l = year.length; + year = year.substring(l - 2, l); + } + + return year; + } + }, + Y: { + // Long year like 1990 + name: 'Y', + index: 0, + extract: function extract() { + return '(\\d{4})'; + }, + + parser: DateTimeFormatter.defaultNumberParser(), + formatter: function formatter(val) { + var d = convertToNativeDate(val); + var year = d.getFullYear().toString(); + + return year; + } + } + }; + + return definitions; +}; + +/* + * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal + * definition to the informal notations. + * + * @return {Object} : Formal definition of the tokens + */ +DateTimeFormatter.getTokenFormalNames = function () { + var definitions = DateTimeFormatter.getTokenDefinitions(); + + return { + HOUR: definitions.H, + HOUR_12: definitions.l, + AMPM_UPPERCASE: definitions.p, + AMPM_LOWERCASE: definitions.P, + MINUTE: definitions.M, + SECOND: definitions.S, + SHORT_DAY: definitions.a, + LONG_DAY: definitions.A, + DAY_OF_MONTH: definitions.e, + DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d, + SHORT_MONTH: definitions.b, + LONG_MONTH: definitions.B, + MONTH_OF_YEAR: definitions.m, + SHORT_YEAR: definitions.y, + LONG_YEAR: definitions.Y + }; +}; + +/* + * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from + * the date time parameter array. + * + * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed + * to the resolver function in that particular sequence only. 
+ */ +DateTimeFormatter.tokenResolver = function () { + var definitions = DateTimeFormatter.getTokenDefinitions(); + var defaultResolver = function defaultResolver() { + // eslint-disable-line require-jsdoc + var i = 0; + var arg = void 0; + var targetParam = void 0; + var l = arguments.length; + + for (; i < l; i++) { + arg = arguments.length <= i ? undefined : arguments[i]; + if (arguments.length <= i ? undefined : arguments[i]) { + targetParam = arg; + } + } + + if (!targetParam) { + return null; + } + + return targetParam[0].parser(targetParam[1]); + }; + + return { + YEAR: [definitions.y, definitions.Y, defaultResolver], + MONTH: [definitions.b, definitions.B, definitions.m, defaultResolver], + DAY: [definitions.a, definitions.A, definitions.e, definitions.d, defaultResolver], + HOUR: [definitions.H, definitions.l, definitions.p, definitions.P, function (hourFormat24, hourFormat12, ampmLower, ampmUpper) { + var targetParam = void 0; + var amOrpm = void 0; + var isPM = void 0; + var val = void 0; + + if (hourFormat12 && (amOrpm = ampmLower || ampmUpper)) { + if (amOrpm[0].parser(amOrpm[1]) === 'pm') { + isPM = true; + } + + targetParam = hourFormat12; + } else if (hourFormat12) { + targetParam = hourFormat12; + } else { + targetParam = hourFormat24; + } + + if (!targetParam) { + return null; + } + + val = targetParam[0].parser(targetParam[1]); + if (isPM) { + val += 12; + } + return val; + }], + MINUTE: [definitions.M, defaultResolver], + SECOND: [definitions.S, defaultResolver] + }; +}; + +/* + * Finds token from the format rule specified by a user. 
+ * @param format {String} : The format of the input date specified by the user + * @return {Array} : An array of objects which contains the available token and their occurence index in the format + */ +DateTimeFormatter.findTokens = function (format) { + var tokenPrefix = DateTimeFormatter.TOKEN_PREFIX; + var definitions = DateTimeFormatter.getTokenDefinitions(); + var tokenLiterals = Object.keys(definitions); + var occurrence = []; + var i = void 0; + var forwardChar = void 0; + + while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) { + forwardChar = format[i + 1]; + if (tokenLiterals.indexOf(forwardChar) === -1) { + continue; + } + + occurrence.push({ + index: i, + token: forwardChar + }); + } + + return occurrence; +}; + +/* + * Format any JS date to a specified date given by user. + * + * @param date {Number | Date} : The date object which is to be formatted + * @param format {String} : The format using which the date will be formatted for display + */ +DateTimeFormatter.formatAs = function (date, format) { + var nDate = convertToNativeDate(date); + var occurrence = DateTimeFormatter.findTokens(format); + var definitions = DateTimeFormatter.getTokenDefinitions(); + var formattedStr = String(format); + var tokenPrefix = DateTimeFormatter.TOKEN_PREFIX; + var token = void 0; + var formattedVal = void 0; + var i = void 0; + var l = void 0; + + for (i = 0, l = occurrence.length; i < l; i++) { + token = occurrence[i].token; + formattedVal = definitions[token].formatter(nDate); + formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal); + } + + return formattedStr; +}; + +/* + * Parses the user specified date string to extract the date time params. 
+ * + * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli] + */ +DateTimeFormatter.prototype.parse = function (dateTimeStamp, options) { + var tokenResolver = DateTimeFormatter.tokenResolver(); + var dtParams = this.extractTokenValue(dateTimeStamp); + var dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE; + var noBreak = options && options.noBreak; + var dtParamArr = []; + var args = []; + var resolverKey = void 0; + var resolverParams = void 0; + var resolverFn = void 0; + var val = void 0; + var i = void 0; + var param = void 0; + var resolvedVal = void 0; + var l = void 0; + var result = []; + + for (resolverKey in tokenResolver) { + if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { + continue; + } + + args.length = 0; + resolverParams = tokenResolver[resolverKey]; + resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0]; + + for (i = 0, l = resolverParams.length; i < l; i++) { + param = resolverParams[i]; + val = dtParams[param.name]; + + if (val === undefined) { + args.push(null); + } else { + args.push([param, val]); + } + } + + resolvedVal = resolverFn.apply(this, args); + + if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) { + break; + } + + dtParamArr[dtParamSeq[resolverKey]] = resolvedVal; + } + + if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length)) { + result.unshift(dtParamArr[0], 0, 1); + } else { + result.unshift.apply(result, dtParamArr); + } + + return result; +}; + +/* + * Extract the value of the token from user specified date time string. 
+ * + * @return {Object} : An key value pair which contains the tokens as key and value as pair + */ +DateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) { + var format = this.format; + var definitions = DateTimeFormatter.getTokenDefinitions(); + var tokenPrefix = DateTimeFormatter.TOKEN_PREFIX; + var occurrence = DateTimeFormatter.findTokens(format); + var tokenObj = {}; + + var lastOccurrenceIndex = void 0; + var occObj = void 0; + var occIndex = void 0; + var targetText = void 0; + var regexFormat = void 0; + + var l = void 0; + var i = void 0; + + regexFormat = String(format); + + var tokenArr = occurrence.map(function (obj) { + return obj.token; + }); + var occurrenceLength = occurrence.length; + for (i = occurrenceLength - 1; i >= 0; i--) { + occIndex = occurrence[i].index; + + if (occIndex + 1 === regexFormat.length - 1) { + lastOccurrenceIndex = occIndex; + continue; + } + + if (lastOccurrenceIndex === undefined) { + lastOccurrenceIndex = regexFormat.length; + } + + targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex); + regexFormat = regexFormat.substring(0, occIndex + 2) + RegExp.escape(targetText) + regexFormat.substring(lastOccurrenceIndex, regexFormat.length); + + lastOccurrenceIndex = occIndex; + } + + for (i = 0; i < occurrenceLength; i++) { + occObj = occurrence[i]; + regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract()); + } + + var extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || []; + extractValues.shift(); + + for (i = 0, l = tokenArr.length; i < l; i++) { + tokenObj[tokenArr[i]] = extractValues[i]; + } + return tokenObj; +}; + +/* + * Give back the JS native date formed from user specified date string + * + * @return {Date} : Native JS Date + */ +DateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) { + var date = null; + if (Number.isFinite(dateTimeStamp)) { + date = new Date(dateTimeStamp); + } else if (!this.format && 
Date.parse(dateTimeStamp)) { + date = new Date(dateTimeStamp); + } else { + var dtParams = this.dtParams = this.parse(dateTimeStamp); + if (dtParams.length) { + this.nativeDate = new (Function.prototype.bind.apply(Date, [null].concat(_toConsumableArray(dtParams))))(); + date = this.nativeDate; + } + } + return date; +}; + +DateTimeFormatter.prototype.checkIfOnlyYear = function (len) { + return len === 1 && this.format.match(/y|Y/g).length; +}; + +/* + * Represents JS native date to a user specified format. + * + * @param format {String} : The format according to which the date is to be represented + * @return {String} : The formatted date string + */ +DateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) { + var nativeDate = void 0; + + if (dateTimeStamp) { + nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp); + } else if (!(nativeDate = this.nativeDate)) { + nativeDate = this.getNativeDate(dateTimeStamp); + } + + return DateTimeFormatter.formatAs(nativeDate, format); +}; + + + +/***/ }), + +/***/ "./src/utils/domain-generator.js": +/*!***************************************!*\ + !*** ./src/utils/domain-generator.js ***! + \***************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/** + * Generates domain for measure field. + * + * @param {Array} data - The array of data. + * @return {Array} Returns the measure domain. + */ +/* harmony default export */ __webpack_exports__["default"] = (function (data) { + var min = Number.POSITIVE_INFINITY; + var max = Number.NEGATIVE_INFINITY; + + data.forEach(function (d) { + if (d < min) { + min = d; + } + if (d > max) { + max = d; + } + }); + + return [min, max]; +}); + +/***/ }), + +/***/ "./src/utils/extend2.js": +/*!******************************!*\ + !*** ./src/utils/extend2.js ***! + \******************************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return extend2; }); +var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; + +/* eslint-disable */ +var OBJECTSTRING = 'object'; +var objectToStrFn = Object.prototype.toString; +var objectToStr = '[object Object]'; +var arrayToStr = '[object Array]'; + +function checkCyclicRef(obj, parentArr) { + var i = parentArr.length; + var bIndex = -1; + + while (i) { + if (obj === parentArr[i]) { + bIndex = i; + return bIndex; + } + i -= 1; + } + + return bIndex; +} + +function merge(obj1, obj2, skipUndef, tgtArr, srcArr) { + var item, srcVal, tgtVal, str, cRef; + // check whether obj2 is an array + // if array then iterate through it's index + // **** MOOTOOLS precution + + if (!srcArr) { + tgtArr = [obj1]; + srcArr = [obj2]; + } else { + tgtArr.push(obj1); + srcArr.push(obj2); + } + + if (obj2 instanceof Array) { + for (item = 0; item < obj2.length; item += 1) { + try { + srcVal = obj1[item]; + tgtVal = obj2[item]; + } catch (e) { + continue; + } + + if ((typeof tgtVal === 'undefined' ? 'undefined' : _typeof(tgtVal)) !== OBJECTSTRING) { + if (!(skipUndef && tgtVal === undefined)) { + obj1[item] = tgtVal; + } + } else { + if (srcVal === null || (typeof srcVal === 'undefined' ? 'undefined' : _typeof(srcVal)) !== OBJECTSTRING) { + srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {}; + } + cRef = checkCyclicRef(tgtVal, srcArr); + if (cRef !== -1) { + srcVal = obj1[item] = tgtArr[cRef]; + } else { + merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr); + } + } + } + } else { + for (item in obj2) { + try { + srcVal = obj1[item]; + tgtVal = obj2[item]; + } catch (e) { + continue; + } + + if (tgtVal !== null && (typeof tgtVal === 'undefined' ? 'undefined' : _typeof(tgtVal)) === OBJECTSTRING) { + // Fix for issue BUG: FWXT-602 + // IE < 9 Object.prototype.toString.call(null) gives + // '[object Object]' instead of '[object Null]' + // that's why null value becomes Object in IE < 9 + str = objectToStrFn.call(tgtVal); + if (str === objectToStr) { + if (srcVal === null || (typeof srcVal === 'undefined' ? 'undefined' : _typeof(srcVal)) !== OBJECTSTRING) { + srcVal = obj1[item] = {}; + } + cRef = checkCyclicRef(tgtVal, srcArr); + if (cRef !== -1) { + srcVal = obj1[item] = tgtArr[cRef]; + } else { + merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr); + } + } else if (str === arrayToStr) { + if (srcVal === null || !(srcVal instanceof Array)) { + srcVal = obj1[item] = []; + } + cRef = checkCyclicRef(tgtVal, srcArr); + if (cRef !== -1) { + srcVal = obj1[item] = tgtArr[cRef]; + } else { + merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr); + } + } else { + obj1[item] = tgtVal; + } + } else { + if (skipUndef && tgtVal === undefined) { + continue; + } + obj1[item] = tgtVal; + } + } + } + return obj1; +} + +function extend2(obj1, obj2, skipUndef) { + //if none of the arguments are object then return back + if ((typeof obj1 === 'undefined' ? 'undefined' : _typeof(obj1)) !== OBJECTSTRING && (typeof obj2 === 'undefined' ? 'undefined' : _typeof(obj2)) !== OBJECTSTRING) { + return null; + } + + if ((typeof obj2 === 'undefined' ? 'undefined' : _typeof(obj2)) !== OBJECTSTRING || obj2 === null) { + return obj1; + } + + if ((typeof obj1 === 'undefined' ? 'undefined' : _typeof(obj1)) !== OBJECTSTRING) { + obj1 = obj2 instanceof Array ? 
[] : {}; + } + merge(obj1, obj2, skipUndef); + return obj1; +} + + + +/***/ }), + +/***/ "./src/utils/helper.js": +/*!*****************************!*\ + !*** ./src/utils/helper.js ***! + \*****************************/ +/*! exports provided: isArray, isObject, isString, isCallable, uniqueValues, getUniqueId, isArrEqual, formatNumber, detectDataFormat */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isArray", function() { return isArray; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isObject", function() { return isObject; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isString", function() { return isString; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isCallable", function() { return isCallable; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "uniqueValues", function() { return uniqueValues; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "getUniqueId", function() { return getUniqueId; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "isArrEqual", function() { return isArrEqual; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "formatNumber", function() { return formatNumber; }); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "detectDataFormat", function() { return detectDataFormat; }); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../enums */ "./src/enums/index.js"); +function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + + +/** + * Checks whether the value is an array. 
+ * + * @param {*} val - The value to be checked. + * @return {boolean} Returns true if the value is an array otherwise returns false. + */ +function isArray(val) { + return Array.isArray(val); +} + +/** + * Checks whether the value is an object. + * + * @param {*} val - The value to be checked. + * @return {boolean} Returns true if the value is an object otherwise returns false. + */ +function isObject(val) { + return val === Object(val); +} + +/** + * Checks whether the value is a string value. + * + * @param {*} val - The value to be checked. + * @return {boolean} Returns true if the value is a string value otherwise returns false. + */ +function isString(val) { + return typeof val === 'string'; +} + +/** + * Checks whether the value is callable. + * + * @param {*} val - The value to be checked. + * @return {boolean} Returns true if the value is callable otherwise returns false. + */ +function isCallable(val) { + return typeof val === 'function'; +} + +/** + * Returns the unique values from the input array. + * + * @param {Array} data - The input array. + * @return {Array} Returns a new array of unique values. + */ +function uniqueValues(data) { + return [].concat(_toConsumableArray(new Set(data))); +} + +var getUniqueId = function getUniqueId() { + return 'id-' + new Date().getTime() + Math.round(Math.random() * 10000); +}; + +/** + * Checks Whether two arrays have same content. + * + * @param {Array} arr1 - The first array. + * @param {Array} arr2 - The 2nd array. + * @return {boolean} Returns whether two array have same content. + */ +function isArrEqual(arr1, arr2) { + if (!isArray(arr1) || !isArray(arr2)) { + return arr1 === arr2; + } + + if (arr1.length !== arr2.length) { + return false; + } + + for (var i = 0; i < arr1.length; i++) { + if (arr1[i] !== arr2[i]) { + return false; + } + } + + return true; +} + +/** + * It is the default number format function for the measure field type. + * + * @param {any} val - The input value. 
+ * @return {number} Returns a number value. + */ +function formatNumber(val) { + return val; +} + +/** + * Returns the detected data format. + * + * @param {any} data - The input data to be tested. + * @return {string} Returns the data format name. + */ +var detectDataFormat = function detectDataFormat(data) { + if (isString(data)) { + return _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].DSV_STR; + } else if (isArray(data) && isArray(data[0])) { + return _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].DSV_ARR; + } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) { + return _enums__WEBPACK_IMPORTED_MODULE_0__["DataFormat"].FLAT_JSON; + } + return null; +}; + +/***/ }), + +/***/ "./src/utils/index.js": +/*!****************************!*\ + !*** ./src/utils/index.js ***! + \****************************/ +/*! exports provided: DateTimeFormatter, columnMajor, generateMeasureDomain, extend2, isArray, isObject, isString, isCallable, uniqueValues, getUniqueId, isArrEqual, formatNumber, detectDataFormat */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _date_time_formatter__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./date-time-formatter */ "./src/utils/date-time-formatter.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "DateTimeFormatter", function() { return _date_time_formatter__WEBPACK_IMPORTED_MODULE_0__["default"]; }); + +/* harmony import */ var _column_major__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./column-major */ "./src/utils/column-major.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "columnMajor", function() { return _column_major__WEBPACK_IMPORTED_MODULE_1__["default"]; }); + +/* harmony import */ var _domain_generator__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! 
./domain-generator */ "./src/utils/domain-generator.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "generateMeasureDomain", function() { return _domain_generator__WEBPACK_IMPORTED_MODULE_2__["default"]; }); + +/* harmony import */ var _extend2__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./extend2 */ "./src/utils/extend2.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "extend2", function() { return _extend2__WEBPACK_IMPORTED_MODULE_3__["default"]; }); + +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./helper */ "./src/utils/helper.js"); +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "isArray", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["isArray"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "isObject", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["isObject"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "isString", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["isString"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "isCallable", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["isCallable"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "uniqueValues", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["uniqueValues"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "getUniqueId", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["getUniqueId"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "isArrEqual", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["isArrEqual"]; }); + +/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "formatNumber", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["formatNumber"]; }); + +/* harmony 
reexport (safe) */ __webpack_require__.d(__webpack_exports__, "detectDataFormat", function() { return _helper__WEBPACK_IMPORTED_MODULE_4__["detectDataFormat"]; }); + + + + + + + +/***/ }), + +/***/ "./src/utils/reducer-store.js": +/*!************************************!*\ + !*** ./src/utils/reducer-store.js ***! + \************************************/ +/*! exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _operator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../operator */ "./src/operator/index.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + +/** + * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one + * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel + * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton. 
+ * + * @public + * @namespace DataModel + */ + +var ReducerStore = function () { + function ReducerStore() { + var _this = this; + + _classCallCheck(this, ReducerStore); + + this.store = new Map(); + this.store.set('defReducer', _operator__WEBPACK_IMPORTED_MODULE_0__["defReducer"]); + + Object.entries(_operator__WEBPACK_IMPORTED_MODULE_0__["fnList"]).forEach(function (key) { + _this.store.set(key[0], key[1]); + }); + } + + /** + * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the + * value of `defaultReducer` is used for aggregation. + * + * @public + * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing + * name lookup. If no name is found then it takes `sum` as the default reducer. + * @return {ReducerStore} Returns instance of the singleton store in page. + */ + + + _createClass(ReducerStore, [{ + key: 'defaultReducer', + value: function defaultReducer() { + if (!arguments.length) { + return this.store.get('defReducer'); + } + + var reducer = arguments.length <= 0 ? undefined : arguments[0]; + + if (typeof reducer === 'function') { + this.store.set('defReducer', reducer); + } else { + reducer = String(reducer); + if (Object.keys(_operator__WEBPACK_IMPORTED_MODULE_0__["fnList"]).indexOf(reducer) !== -1) { + this.store.set('defReducer', _operator__WEBPACK_IMPORTED_MODULE_0__["fnList"][reducer]); + } else { + throw new Error('Reducer ' + reducer + ' not found in registry'); + } + } + return this; + } + + /** + * + * Registers a {@link reducer | reducer}. + * A {@link reducer | reducer} has to be registered before it is used. 
+ * + * @example + * // find the mean squared value of a given set + * const reducerStore = DataModel.Reducers(); + * + * reducers.register('meanSquared', (arr) => { + * const squaredVal = arr.map(item => item * item); + * let sum = 0; + * for (let i = 0, l = squaredVal.length; i < l; i++) { + * sum += squaredVal[i++]; + * } + * + * return sum; + * }) + * + * // datamodel (dm) is already prepared with cars.json + * const dm1 = dm.groupBy(['origin'], { + * accleration: 'meanSquared' + * }); + * + * @public + * + * @param {string} name formal name for a reducer. If the given name already exists in store it is overridden by new + * definition. + * @param {Function} reducer definition of {@link reducer} function. + * + * @return {Function} function for unregistering the reducer. + */ + + }, { + key: 'register', + value: function register(name, reducer) { + var _this2 = this; + + if (typeof reducer !== 'function') { + throw new Error('Reducer should be a function'); + } + + name = String(name); + this.store.set(name, reducer); + + return function () { + _this2.__unregister(name); + }; + } + }, { + key: '__unregister', + value: function __unregister(name) { + if (this.store.has(name)) { + this.store.delete(name); + } + } + }, { + key: 'resolve', + value: function resolve(name) { + if (name instanceof Function) { + return name; + } + return this.store.get(name); + } + }]); + + return ReducerStore; +}(); + +var reducerStore = function () { + var store = null; + + function getStore() { + if (store === null) { + store = new ReducerStore(); + } + return store; + } + return getStore(); +}(); + +/* harmony default export */ __webpack_exports__["default"] = (reducerStore); + +/***/ }), + +/***/ "./src/value.js": +/*!**********************!*\ + !*** ./src/value.js ***! + \**********************/ +/*! 
exports provided: default */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./helper */ "./src/helper.js"); +var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + + + +/** + * The wrapper class on top of the primitive value of a field. + * + * @todo Need to have support for StringValue, NumberValue, DateTimeValue + * and GeoValue. These types should expose predicate API mostly. + */ + +var Value = function () { + + /** + * Creates new Value instance. + * + * @param {*} val - the primitive value from the field cell. + * @param {string | Field} field - The field from which the value belongs. 
+ */ + function Value(value, rawValue, field) { + _classCallCheck(this, Value); + + var formattedValue = Object(_helper__WEBPACK_IMPORTED_MODULE_0__["getNumberFormattedVal"])(field, value); + + Object.defineProperties(this, { + _value: { + enumerable: false, + configurable: false, + writable: false, + value: value + }, + _formattedValue: { + enumerable: false, + configurable: false, + writable: false, + value: formattedValue + }, + _internalValue: { + enumerable: false, + configurable: false, + writable: false, + value: rawValue + } + }); + + this.field = field; + } + + /** + * Returns the field value. + * + * @return {*} Returns the current value. + */ + + + _createClass(Value, [{ + key: 'toString', + + + /** + * Converts to human readable string. + * + * @override + * @return {string} Returns a human readable string of the field value. + * + */ + value: function toString() { + return String(this.value); + } + + /** + * Returns the value of the field. + * + * @override + * @return {*} Returns the field value. 
+ */ + + }, { + key: 'valueOf', + value: function valueOf() { + return this.value; + } + }, { + key: 'value', + get: function get() { + return this._value; + } + + /** + * Returns the parsed value of field + */ + + }, { + key: 'formattedValue', + get: function get() { + return this._formattedValue; + } + + /** + * Returns the internal value of field + */ + + }, { + key: 'internalValue', + get: function get() { + return this._internalValue; + } + }]); + + return Value; +}(); + +/* harmony default export */ __webpack_exports__["default"] = (Value); + +/***/ }) + +/******/ }); +}); //# sourceMappingURL=datamodel.js.map \ No newline at end of file diff --git a/dist/datamodel.js.map b/dist/datamodel.js.map index 6ea996b..687fae0 100644 --- a/dist/datamodel.js.map +++ b/dist/datamodel.js.map @@ -1 +1 @@ -{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/.
/src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js"
,"webpack://DataModel/./src/converter/defaultConverters/autoCoverter.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_D
AY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","len","store","fields","forEach","fieldIndex","Array","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","rawValue","formattedValue","getNumberFormattedVal","defineProperties","_value","configurable","writable","_formattedValue","_internalValue","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FUL
LOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset","rowAdded","rowPosition","ii","tuple","userArg","partialField","formattedData","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","dataType","sortType","retFunc","getSortFn","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","applyStandardSort","sortingDetails","fieldName","sortMeta","fieldInSchema","sortingFn","slice","f","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","sortData","dataObj","filter","sDetial","groupSortingIdx","findIndex","standardSortingDetails","groupSortingDetails","detail","sortedGroupMap","row","nextMap","applyGroupSort","uids","pop","dataBuilder","colIdentifier","addUid","columnWise","retObj","reqSorting","tmpDataArr","colName","insertInd","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","filteredValues","min","max","sqrt","mean","num","variance","defaultReducerName","ReducerStore","defReducer","entries","reducer","__unregister","delete","Function","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr
","measureArr","newDataModel","rowCount","hash","_","cachedStore","cloneProvider","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","internalValue","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","PartialField","_sanitize","Field","subtype","description","displayName","_params","_context","build","Dimension","_cachedDomain","calculateDataDomain","Measure","unit","numberFormat","FieldParser","CategoricalParser","isInvalid","getInvalidType","trim","Categorical","Set","domain","add","TemporalParser","_dtf","Temporal","_cachedMinDiff","sortedData","arrLn","minDiff","POSITIVE_INFINITY","prevDatum","nextDatum","processedCount","dataFormat","parsedDatum","BinnedParser","matched","parseFloat","Binned","binsArr","bins","ContinuousParser","Continuous","NEGATIVE_INFINITY","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","fieldRegistry","createFields","dataColumn","headers","headersObj","header","BUILDER","createUnitField","DataConverter","_type","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","concat","formatBody","formatRows","csv","dsv","tsv","DSVArr","schemaFields","unitSchema","firstRowHeader","columnMajor","headerMap","h","schemaField","headIndex","DSVStr","fieldSeparator","d3Dsv","DSVStringConverter","FlatJSON","insertionIndex","schemaFieldsName","JSONConverter","DSVArrayConverter","Auto","converters","AutoDataConverter","DataConverterStore","_getDefaultConverters","converter","converterStore","prepareSelectionData","rawData","resp","updateFields","partialFieldspace","fieldStoreName","c
ollID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistCurrentDerivation","model","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","persistDerivations","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastInsertedValue","li","selectRowDiffsetIterator","checker","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","getPartialFieldspace","formattedFieldsData","rawFieldsData","cloneWithAllFields","clone","calculateFieldsConfig","getKey","fn","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","getFieldsConfig","propModel","keyFn","getData","fieldsConfig","dLen","indices","fieldsSpace","v","valuesMap","present","every","select","saveChild","some","addDiffsetToClonedDm","selectConfig","cloneWithProject","projField","allFields","cloned","projectionSet","actualProjField","splitWithProject","projFieldSet","projFields","sanitizeUnitSchema","sanitizeAndValidateSchema","validateUnitSchema","updateData","relation","defaultConfig","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","_partialFieldspace","valueObjects","_cachedValueObjects","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","derivation","params","groupByString","getDerivationArguments","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","getRootGroupByModel","_parent","find","getRootDataModel","getPathToRootModel","path","propagateToAllDataModels","identifiers","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","propagateInterpolatedValues","criterias","persistent","actionCriterias","values","mutabl
eActions","filteredCriteria","entry","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","rootModel","propConfig","sourceIdentifiers","rootGroupByModel","groupByModel","inf","propagationModel","filteredModel","getFilteredModel","reverse","propagateImmutableActions","immutableActions","filterImmutableAction","criteriaModel","addToPropNamespace","sourceNamespace","isMutableAction","getNormalizedProFields","fieldConfig","normalizedProjField","constructor","search","Relation","source","_fieldStoreName","_propagationNameSpace","_fieldspace","joinWith","unionWith","differenceWith","defConfig","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithSelect","setParent","_fieldConfig","fieldObj","def","removeChild","sibling","parent","_onPropagation","order","withUid","getAllFields","dataGenerated","fieldNames","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","ids","fill","fieldsArr","dataInCSVArr","sortedDm","colData","rowsCount","serializedData","rowIdx","colIdx","cachedValueObjects","fieldinst","dependency","replaceVar","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","addField","addToNameSpace","payload","eventName","measureFieldName","binFieldName","measureField","binsCount","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","clonedDMs","splitWithSelect","uniqueFields","commonFields","normalizedProjFieldSets","fieldSet","first","last","count","sd","std","Operators","compose","operations","currentDM","firstChild","dispose","bin","project","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","FieldsUtility","enums"],"mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,YAAa,GAAIH,GACE,iBAAZC,QACdA,QAAmB,UAAID,IAEvBD,EAAgB,UAAIC,IARtB,CASGK,QAAQ,WACX,O,YCTE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUP,QAGnC,IAAIC,EAASI,EAAiBE,GAAY,CACzCC,E
AAGD,EACHE,GAAG,EACHT,QAAS,IAUV,OANAU,EAAQH,GAAUI,KAAKV,EAAOD,QAASC,EAAQA,EAAOD,QAASM,GAG/DL,EAAOQ,GAAI,EAGJR,EAAOD,QA0Df,OArDAM,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASd,EAASe,EAAMC,GAC3CV,EAAoBW,EAAEjB,EAASe,IAClCG,OAAOC,eAAenB,EAASe,EAAM,CAAEK,YAAY,EAAMC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAAStB,GACX,oBAAXuB,QAA0BA,OAAOC,aAC1CN,OAAOC,eAAenB,EAASuB,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAenB,EAAS,aAAc,CAAEyB,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAAShC,GAChC,IAAIe,EAASf,GAAUA,EAAO2B,WAC7B,WAAwB,OAAO3B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAK,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,G,+jEClFrD,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,G,k3BCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCCKC,EANU,CACrBC,YAAa,cACbC,SAAU,WACVC,OAAQ,UCCGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,
EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC9F,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,EAAG,CAECM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,EAAG,CAECvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECtG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEClH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC
,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,EAAG,CAECC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,EAAG,CAECG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,EAAG,CAECjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI,UAAK8F,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAX,YAAWA,IACX,kBAASA,OAAT,YAASA
,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MAK/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,KAAM,CAACyD,EAAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU8C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIJ,SACAK,SACAC,SACAzE,SAcJ,OAZIqE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGvD,OAAOuD,EAAO,MACxBC,GAAO,GAGXN,EAAcE,GAEdF,EADOE,GAGOD,EAGbD,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCM,IACAzE,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkB+F,WAAa,SAAU9F,GAQrC,IAPA,IAAM+F,EAAchG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCmE,EAAgBzJ,OAAO0J,KAAK1B,GAC5B2B,EAAa,GACfrK,SACAsK,UAEItK,EAAImE,EAAOoG,QAAQL,EAAalK,EAAI,KAAO,GAC/CsK,EAAcnG,EAAOnE,EAAI,IACmB,IAAxCmK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZlE,MAAOtG,EACPyK,MAAOH,IAIf,OAAOD,GASXnG,EAAkBwG,SAAW,SAAU3G,EAAMI,GACzC,IAQIlE,EARE0K,EAAQ7G,EAAoBC,GAC5BsG,EAAanG,EAAkB+F,WAAW9F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC4E,EAAeC,OAAO1G,GACpB+F,EAAchG,EAAkBU,aAClC6F,SACAK,SACA9K,SAGJ,IAAKA,EAAI,EAAGC,EAAIoK,EAAWtE,OAAQ/F,EAAIC,EAAGD,IAEtC8K,EAAepC,EADf+B,EAAQJ,EAAWrK,GAAGyK,OACYhE,UAAUkE,GAC5CC,EAAeA,EAAajG,QAAQ,IAAIH,OAAO0F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX1G,EAAkBtC,UAAUmJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM1B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK8G,kBAAkBF,GAClCG,EAAajH,EAAkBW,wBAC/BuG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAlG,SACAvF,SACA0L,SACAC,SACA1L,SACA+H,EAAS,GAEb,IAAKuD,KAAehC,EAChB,GAAK,GAAG1H,eAAe1B,KAAKoJ,EAAegC,GAA3C,CAMA,IAJAD,EAAKvF,OAAS,EAEd0F,GADAD,EAAiBjC,EAAcgC,IACHK,OAAOJ,EAAezF,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIuL,EAAezF,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNqH,EAAQF,EAAexL,IACFO,OAGjB+K,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOnG,IAM1B,GAAI,OAFJoG,EAAcF,EAAWI,MAAMzH,KAAMkH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWtF,QAAU3B,KAAK0H,gBAAgBT,EAAWtF,QAErDiC,EAAO+D,QAAQV,EAAW,GAAI,EAAG,GAEjCrD,EAAO+
D,QAAP,MAAA/D,EAAkBqD,GAGfrD,GAQX9D,EAAkBtC,UAAUsJ,kBAAoB,SAAUF,GACtD,IAYI/K,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCkE,EAAchG,EAAkBU,aAChCyF,EAAanG,EAAkB+F,WAAW9F,GAC1C6H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGArM,SAEJqM,EAAcxB,OAAO1G,GAErB,IAAMmI,EAAWjC,EAAWkC,KAAI,SAAAC,GAAA,OAAOA,EAAI/B,SACrCgC,EAAmBpC,EAAWtE,OACpC,IAAK/F,EAAIyM,EAAmB,EAAGzM,GAAK,EAAGA,KACnCmM,EAAW9B,EAAWrK,GAAGsG,OAEV,IAAM+F,EAAYtG,OAAS,QAKdzB,IAAxB2H,IACAA,EAAsBI,EAAYtG,QAGtCqG,EAAaC,EAAYpE,UAAUkE,EAAW,EAAGF,GACjDI,EAAcA,EAAYpE,UAAU,EAAGkE,EAAW,GAC9C3H,OAAOC,OAAO2H,GACdC,EAAYpE,UAAUgE,EAAqBI,EAAYtG,QAE3DkG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKnM,EAAI,EAAGA,EAAIyM,EAAkBzM,IAC9BkM,EAAS7B,EAAWrK,GACpBqM,EAAcA,EAAY1H,QAAQuF,EAAcgC,EAAOzB,MAAO/B,EAAYwD,EAAOzB,OAAOlE,WAG5F,IAAMmG,EAAgB1B,EAAc2B,MAAM,IAAInI,OAAO6H,KAAiB,GAGtE,IAFAK,EAAcE,QAET5M,EAAI,EAAGC,EAAIqM,EAASvG,OAAQ/F,EAAIC,EAAGD,IACpCgM,EAASM,EAAStM,IAAM0M,EAAc1M,GAE1C,OAAOgM,GAQX9H,EAAkBtC,UAAUiL,cAAgB,SAAU7B,GAClD,IAAIjH,EAAO,KACX,GAAI+I,OAAOrH,SAASuF,GAChBjH,EAAO,IAAIC,KAAKgH,QACb,IAAK5G,KAAKD,QAAUH,KAAK+G,MAAMC,GAClCjH,EAAO,IAAIC,KAAKgH,OAEf,CACD,IAAM3G,EAAWD,KAAKC,SAAWD,KAAK2G,MAAMC,GACxC3G,EAAS0B,SACT3B,KAAKG,WAAL,kCAAsBP,KAAtB,c,sHAAA,CAA8BK,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUkK,gBAAkB,SAASiB,GACnD,OAAe,IAARA,GAAa3I,KAAKD,OAAOwI,MAAM,QAAQ5G,QASlD7B,EAAkBtC,UAAU8I,SAAW,SAAUvG,EAAQ6G,GACrD,IAAIzG,SAQJ,OANIyG,EACAzG,EAAaH,KAAKG,WAAaH,KAAKyI,cAAc7B,IACzCzG,EAAaH,KAAKG,cAC3BA,EAAaH,KAAKyI,cAAc7B,IAG7B9G,EAAkBwG,SAASnG,EAAYJ,ICruBnC,eAAC6I,GACZ,IAAIhN,EAAI,EACR,OAAO,WAAe,2BAAXiN,EAAW,qBAAXA,EAAW,gBAClBA,EAAOC,SAAQ,SAAC3H,EAAK4H,GACXH,EAAMG,aAAuBC,QAC/BJ,EAAMG,GAAcC,MAAMC,KAAK,CAAEtH,OAAQ/F,KAE7CgN,EAAMG,GAAY3C,KAAKjF,MAE3BvF,M,4MCdFsN,EAAe,SACfC,EAAgB7M,OAAOkB,UAAU+E,SACjC6G,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAelB,EAAKmB,GAIzB,IAHA,IAAI3N,EAAI2N,EAAU5H,OACd6H,GAAU,EAEP5N,GAAG,CACN,GAAIwM,IAAQmB,EAAU3N,GAElB,OADA4N,EAAS5N,EAGbA,GAAK,EAGT,OAAO4N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,cAAOA,MAASR,SAAgB,IAAOS,EAAP,cAAOA
,MAAST,EACzC,WAGP,IAAOS,EAAP,cAAOA,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,cAAOA,MAASR,IAChBQ,EAAOC,aAAgBX,MAAQ,GAAK,IAnH5C,SAASa,EAAMH,EAAMC,EAAMC,EAAWE,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAO1D,KAAKsD,GACZK,EAAO3D,KAAKuD,KALZG,EAAS,CAACJ,GACVK,EAAS,CAACJ,IAOVA,aAAgBX,MAChB,IAAKgB,EAAO,EAAGA,EAAOL,EAAKhI,OAAQqI,GAAQ,EAAG,CAC1C,IACIC,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,eAGA,IAAO6G,EAAP,cAAOA,MAAWhB,EACZU,QAAwB1J,IAAXgK,IACfR,EAAKM,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQE,aAAkBlB,MAAQ,GAAK,KAG3C,KADdoB,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,SAMrD,IAAKC,KAAQL,EAAM,CACf,IACIM,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,SAGJ,GAAe,OAAX6G,SAAmB,IAAOA,EAAP,cAAOA,MAAWhB,GAKrCiB,EAAMhB,EAAcpN,KAAKmO,MACbd,GACO,OAAXa,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAGxCI,IAAQd,GACE,OAAXY,GAAqBA,aAAkBjB,QACvCiB,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAI7CL,EAAKM,GAAQE,MAGhB,CACD,GAAIN,QAAwB1J,IAAXgK,EACb,SAEJR,EAAKM,GAAQE,GAIzB,OAAOR,EAiBPG,CAAMH,EAAMC,EAAMC,GACXF,GCnIJ,SAASW,EAASlJ,GACrB,OAAO6H,MAAMqB,QAAQlJ,GA2ClB,IAAMmJ,EAAc,wBAAY,IAAI1K,MAAO2K,UAAYvG,KAAKwG,MAAsB,IAAhBxG,KAAKyG,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKhJ,SAAWiJ,EAAKjJ,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAI+O,EAAKhJ,OAAQ/F,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAa1J,GACzB,OAAOA,EASJ,IAAM2J,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAKpJ,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4B6J,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CAtK,KAAK+K,KAAKK,GAAU,CAChBjP,KAAMiP,EACNvC,OAAQsC,EAERE,UAJgB,W
AKZ,IAAIA,EAAYrL,KAAKsL,iBAQrB,OANKD,IACDA,EAAYrL,KAAKsL,iBAAmB,GACpCtL,KAAK6I,OAAOC,SAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,MAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBzL,KAAK0L,eAUzB,OARKD,IACDA,EAAgBzL,KAAK0L,eAAiB,GACtC1L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUC,UAClC8M,EAAcF,EAAMpP,QAAUoP,OAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkB9L,KAAK+L,iBAU3B,OARK/L,KAAK+L,mBACND,EAAkB9L,KAAK+L,iBAAmB,GAC1C/L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUE,YAClCkN,EAAgBP,EAAMpP,QAAUoP,OAIrCO,IAGR9L,KAAK+K,KAAKK,K,yPCqCVY,E,WAxEX,WAAanP,EAAOoP,EAAUV,I,4FAAO,SACjC,IAAMW,EAAiBC,GAAsBZ,EAAO1O,GAEpDP,OAAO8P,iBAAiBpM,KAAM,CAC1BqM,OAAQ,CACJ7P,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,SAEJ2P,gBAAiB,CACbhQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOqP,GAEXO,eAAgB,CACZjQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOoP,KAIfjM,KAAKuL,MAAQA,E,6CAkCb,OAAO9E,OAAOzG,KAAKnD,S,gCAUnB,OAAOmD,KAAKnD,Q,4BAnCZ,OAAOmD,KAAKqM,S,qCAOZ,OAAOrM,KAAKwM,kB,oCAOZ,OAAOxM,KAAKyM,mB,KCxDb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWhL,OAAS,GACDgL,EAAWE,MAAM,KACzB/D,SAAQ,SAACgE,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIpR,EAAIoR,EAAOpR,GAAKqR,EAAKrR,GAAK,EAC/BgR,EAAShR,M,6PCVvBsR,E,WAqBF,WAAarQ,I,4FAAO,SAChBmD,KAAKqM,OAASxP,E,wDAdOsQ,GACrB,OAAKA,EAGE7Q,OAAO8Q,OAAOF,EAAkBG,qBAAsBF,GAFlDD,EAAkBG,yB,mCAsB7B,OAAOrN,KAAKqM,S,iCAUZ,OAAO5F,OAAOzG,KAAKqM,W,iCAGNlL,GACb,OAAQA,aAAe+L,KAAwBA,EAAkBI,mBAAmBnM,K,qCAGlEA,GAClB,OAAOA,aAAe+L,EAAoB/L,EAAM+L,EAAkBI,mBAAmBnM,O,KAO7F+L,EAAkBK,KAAO,IAAIL,EAAkB,QAC/CA,EAAkBM,GAAK,IAAIN,EAAkB,MAC7CA,EAAkBO,IAAM,IAAIP,EAAkB,OAO9CA,EAAkBG,qBAAuB,CACrCK,QAASR,EAAkBM,GAC3BG,IAAKT,EAAkBO,IACvBG,KAAMV,EAAkBK,KACxBrN,UAAWgN,EAAkBM,IAGlBN,Q,8YC5ETW,EAAkB,SAACC,EAASd,EAAOC,GAIrC,IAHA,IAAMc,EAAU,GACZC,EAAOhB,EAEJgB,EAAOf,GACVc,EAAQ3H,KAAK4H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ3H,KAAK4H,GAEND,GAGLE,EAAkB,SAACC,EAAcrR,GAOnC,IANA,IAAIsR,EAAU,EACVC,EAAWF,EAAavM,OAAS,EACjC0M,SACA7M,SAGG2M,GAAWC,GAAU,CAIxB,GAAIvR,IAFJ2E,EAAQ0M,EADRG,EAASF,EAAUnK,KAAKsK,OAAOF,EAAW
D,GAAW,KAGlCnB,OAASnQ,EAAQ2E,EAAMyL,IACtC,OAAOzL,EACA3E,GAAS2E,EAAMyL,IACtBkB,EAAUE,EAAS,EACZxR,EAAQ2E,EAAMwL,QACrBoB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAUME,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,M,wHCzBF,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAI1G,OAAOC,SAAQ,SAACyC,GAChBmE,EAAOtJ,KAAKmF,EAAMI,SAASxP,SAE/BqT,EAAI3G,OAAOC,SAAQ,SAACyC,IAC6B,IAAzCmE,EAAOvJ,QAAQoF,EAAMI,SAASxP,OAC9BsT,EAAOrJ,KAAKmF,EAAMI,SAASxP,SAG5BsT,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD,wDAAxBC,EAAwB,uDAAblB,EAAMC,MACtFrD,EAAS,GACTZ,EAAO,GACPmF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAchU,KAClCoU,EAAoBF,EAAclU,KAClCA,EAAUgU,EAAchU,KAAxB,IAAgCkU,EAAclU,KAC9CqU,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CAqFpB,OAlFAN,EAActH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,OAAiB6T,IACpDU,EAAUvU,KAAUgU,EAAchU,KAAlC,IAA0CuU,EAAUvU,MAExDwP,EAAOvF,KAAKsK,MAEhBL,EAAcxH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMmF,EAAYjH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C6E,EAAiBrK,QAAQuK,EAAUvU,MAC9B6T,IACDU,EAAUvU,KAAUkU,EAAclU,KAAlC,IAA0CuU,EAAUvU,KACpDwP,EAAOvF,KAAKsK,IAGhB/E,EAAOvF,KAAKsK,MAKpBhE,EAAmBmD,EAAIc,aAAa,SAAC/U,GACjC,IAAIgV,GAAW,EACXC,SACJnE,EAAmBoD,EAAIa,aAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAActH,OAAOC,SAAQ,SAACyC,GAC1BwF,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAKnP,IACnCoV,EAAQV,GAAmB/E,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAKnP,GAClCsQ,eAAgBX,EAAM2F,gBAAgBtV,OAG9CyU,EAAcxH,OAAOC,SAAQ,SAACyC,IAC+B,IAAnDiF,EAAiBrK,QAAQoF,EAAMI,SAASxP,OAAgB6T,GAC1De,EAAM3K,KAAKmF,EAAM0F,aAAalG,KAAK+F,IAEvCE,EAAQT,GAAmBhF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM0F,aAAalG,KAAK+F,GAClC5E,eAAgBX,EAAM2F,gBAAgBJ,OAI9C,IAIMK,EAAYC,GAAgBJ,EAAQV,IACpCe,EAAYD,GAAgBJ,EAAQT,IAC1C,GAAIL,EAAmBiB,EAAWE,GALb,kBAAMxB,EAAIyB,kBACV,kBAAMxB,EAAIwB,iBAFb,IAMyE,CACvF,IAAMC,EA
AW,GACjBR,EAAMjI,SAAQ,SAAC0I,EAASC,GACpBF,EAAS5F,EAAO8F,GAAKtV,MAAQqV,KAE7BZ,GAAY7B,EAAMC,QAAUiB,EAC5BlF,EAAK8F,GAAeU,GAGpBxG,EAAK3E,KAAKmL,GACVX,GAAW,EACXC,EAAcjV,QAEf,IAAKqU,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMW,EAAW,GACb5I,EAAMwH,EAActH,OAAOlH,OAAS,EACxCoP,EAAMjI,SAAQ,SAAC0I,EAASC,GAEhBF,EAAS5F,EAAO8F,GAAKtV,MADrBsV,GAAO9I,EACsB6I,EAGA,QAGrCZ,GAAW,EACXC,EAAcjV,EACdmP,EAAK3E,KAAKmL,UAKf,IAAI3T,GAAUmN,EAAMY,EAAQ,CAAExP,SCjHzC,SAASuV,EAAW1O,EAAGO,GACnB,IAAMoO,EAAKA,GAAG3O,EACR4O,EAAKA,GAAGrO,EACd,OAAIoO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB,uDAAXL,EAIrC,OAHII,EAAInQ,OAAS,GArBrB,SAASqQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EAAKjO,KAAKsK,OAAO4D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACNzW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EAC3ByW,EAAOzW,GAAKwW,EAAQxW,GAKxB,IAHA,IAAIoH,EAAIiP,EACJ1O,EAAI4O,EAAM,EAELvW,EAAIqW,EAAIrW,GAAKsW,EAAItW,GAAK,EACvBoH,EAAImP,GACJC,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GACEA,EAAI2O,GACXE,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,GACE+O,EAAOM,EAAOrP,GAAIqP,EAAO9O,KAAO,GACvC6O,EAAQxW,GAAKyW,EAAOrP,GACpBA,GAAK,IAELoP,EAAQxW,GAAKyW,EAAO9O,GACpBA,GAAK,GAqBbsG,CAAMiI,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAInQ,OAAS,EAAGoQ,GAE1BD,E,0gBChCX,SAASQ,EAAqBC,EAAUC,GACpC,IAAMC,EAAmD,SAAvChM,OAAO+L,GAAc9Q,cAA2B,OAAS,MAC3E,OA9CJ,SAAoBgR,EAAUC,GAC1B,IAAIC,SAEJ,OAAQF,GACR,KAAKlU,EAAeC,WACpB,KAAKL,EAAiBE,SAEdsU,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAAJ,OAAUP,EAAIO,GAEd,SAACP,EAAGO,GAAJ,OAAUA,EAAIP,GAE5B,MACJ,QAEQ4P,EADa,QAAbD,EACU,SAAC3P,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,EAAI,GAAK,GAGd,SAACP,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,GAAK,EAAI,GAKhC,OAAOqP,EAYAC,CAAUN,EAAS3G,KAAM6G,GAUpC,SAASK,EAAW/H,EAAMhC,GACtB,IAAMgK,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAlI,EAAKjC,SAAQ,SAACoK,GACV,IAAMC,EAAWD,EAAMnK,GACnBgK,EAAQK,IAAID,GACZF,EAAYF,EAAQtW,IAAI0W,IAAW
,GAAG/M,KAAK8M,IAE3CD,EAAY7M,KAAK,CAAC+M,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYtR,OAAS,OAI5CsR,EAYX,SAASK,EAAoBC,EAAcC,EAAcC,GACrD,IAAMpO,EAAM,CACRqO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,QAAO,SAACC,EAAK5F,EAAM6F,GAE5B,OADAD,EAAI5F,GAAQuF,EAAa,GAAGpL,KAAI,SAAA+K,GAAA,OAASA,EAAMO,EAAmBI,GAAK3R,UAChE0R,IACRvO,GAEIA,EAUX,SAASyO,EAAmB/I,EAAMY,EAAQoI,GAMtC,IALA,IAAIC,SACAC,SACA1B,SACA3W,EAAImY,EAAepS,OAAS,EAEzB/F,GAAK,EAAGA,IACXoY,EAAYD,EAAenY,GAAG,GAC9BqY,EAAWF,EAAenY,GAAG,IAC7B2W,EAAW2B,GAAcvI,EAAQqI,MVrFf,mBU4FHC,EAEXpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAU0Q,EAASjR,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAC1DmI,EAAQ4J,GAAW,WAC1B,IAAMhB,EAAcH,EAAU/H,EAAMwH,EAASrQ,OACvCiS,EAAYF,EAASA,EAAStS,OAAS,GACvC6R,EAAeS,EAASG,MAAM,EAAGH,EAAStS,OAAS,GACnD8R,EAAqBD,EAAarL,KAAI,SAAAkM,GAAA,OAAKH,GAAcvI,EAAQ0I,MAEvEpB,EAAYnK,SAAQ,SAACyK,GACjBA,EAAanN,KAAKkN,EAAmBC,EAAcC,EAAcC,OAGrE5B,EAAUoB,GAAa,SAACjQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO4Q,EAAUnY,EAAGqB,MAIxB0N,EAAKpJ,OAAS,EACdsR,EAAYnK,SAAQ,SAACoK,GACjBnI,EAAK3E,KAAL,MAAA2E,EAAA,EAAamI,EAAM,QAnBG,GAqBvB,WACH,IAAMnB,EAASO,EAAoBC,EAAU0B,GAE7CpC,EAAU9G,GAAM,SAAC/H,EAAGO,GAAJ,OAAUwO,EAAO/O,EAAEuP,EAASrQ,OAAQqB,EAAEgP,EAASrQ,WAH5D,IAiBf,I,EAAMoS,GAAsB,SAAtBA,EAAuBC,EAAYxJ,EAAMY,EAAQoI,GACnD,GAA0B,IAAtBQ,EAAW5S,OAAgB,OAAOoJ,EAEtC,IAAMyJ,EAAYD,EAAW,GACvBpM,EAAM,IAAI6K,IAEhBjI,EAAK4I,QAAO,SAACC,EAAKa,GACd,IAAMC,EAAOD,EAAQD,EAAUtS,OAM/B,OALI0R,EAAIR,IAAIsB,GACRd,EAAInX,IAAIiY,GAAMtO,KAAKqO,GAEnBb,EAAIP,IAAIqB,EAAM,CAACD,IAEZb,IACRzL,GAdmE,2BAgBtE,YAAuBA,EAAvB,+CAA4B,wBAAlBhL,EAAkB,KAAbgE,EAAa,KAClBwT,EAAOL,EAAoBC,EAAWH,MAAM,GAAIjT,EAAKwK,EAAQoI,GACnE5L,EAAIkL,IAAIlW,EAAKwX,GACT3L,MAAMqB,QAAQsK,IACdb,EAAkBa,EAAMhJ,EAAQoI,IApB8B,6EAwBtE,OAAO5L,GA2CJ,SAASyM,GAAUC,EAASd,GAAgB,IACzCpI,EAAiBkJ,EAAjBlJ,OAAQZ,EAAS8J,EAAT9J,KAGd,GAA8B,KAD9BgJ,EAAiBA,EAAee,QAAO,SAAAC,GAAA,QAAab,GAAcvI,EAAQoJ,EAAQ,QAC/DpT,OAAnB,CAEA,IAAIqT,EAAkBjB,EAAekB,WAAU,SAAAF,GAAA,OAA0B,OAAfA,EAAQ,MAClEC,GAAuC,IAArBA,EAAyBA,EAAkBjB,EAAepS,OAE5E,IAAMuT,EAAyBnB,EAAeK,MAAM
,EAAGY,GACjDG,EAAsBpB,EAAeK,MAAMY,GAEjDlB,EAAkB/I,EAAMY,EAAQuJ,GAChCnK,EA5CJ,SAAyBA,EAAMY,EAAQoI,EAAgBQ,GAQnD,GAA8B,KAP9BR,EAAiBA,EAAee,QAAO,SAACM,GACpC,OAAkB,OAAdA,EAAO,KACPb,EAAWnO,KAAKgP,EAAO,KAChB,OAIIzT,OAAgB,OAAOoJ,EAE1CwJ,EAAaA,EAAWpM,KAAI,SAAAlM,GAAA,OAAKiY,GAAcvI,EAAQ1P,MAEvD,IAAMoZ,EAAiBf,GAAoBC,EAAYxJ,EAAMY,EAAQoI,GACrE,OAAOhJ,EAAK5C,KAAI,SAACmN,GAIb,IAHA,IAAI1Z,EAAI,EACJ2Z,EAAUF,GAENrM,MAAMqB,QAAQkL,IAClBA,EAAUA,EAAQ9Y,IAAI6Y,EAAIf,EAAW3Y,KAAKsG,QAG9C,OAAOqT,EAAQ/M,WAuBZgN,CAAezK,EAAMY,EAAQwJ,EAAqBD,EAAuB/M,KAAI,SAAAiN,GAAA,OAAUA,EAAO,OAErGP,EAAQY,KAAO1K,EAAK5C,KAAI,SAAAmN,GAAA,OAAOA,EAAII,SACnCb,EAAQ9J,KAAOA,GCjPZ,SAAS4K,GAAa1K,EAAY0B,EAAYiJ,EAAe7B,EAAgBlN,GAKhFA,EAAUvK,OAAO8Q,OAAO,GAJL,CACfyI,QAAQ,EACRC,YAAY,GAEwBjP,GAExC,IAAMkP,EAAS,CACXpK,OAAQ,GACRZ,KAAM,GACN0K,KAAM,IAEJI,EAAShP,EAAQgP,OACjBG,EAAajC,GAAkBA,EAAepS,OAAS,EAEvDsU,EAAa,GAiDnB,GA/CgBL,EAAc/I,MAAM,KAE5B/D,SAAQ,SAACoN,GACb,IAAK,IAAIta,EAAI,EAAGA,EAAIqP,EAAWtJ,OAAQ/F,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAW+Z,EAAS,CAClCD,EAAW7P,KAAK6E,EAAWrP,IAC3B,UAMZqa,EAAWnN,SAAQ,SAACyC,GAEhBwK,EAAOpK,OAAOvF,KAAKmF,EAAMI,aAGzBkK,GACAE,EAAOpK,OAAOvF,KAAK,CACfjK,KAAM,MACNyP,KAAM,eAIdc,EAAmBC,GAAY,SAAC/Q,GAC5Bma,EAAOhL,KAAK3E,KAAK,IACjB,IAAM+P,EAAYJ,EAAOhL,KAAKpJ,OAAS,EAEvCsU,EAAWnN,SAAQ,SAACyC,EAAOuF,GACvBiF,EAAOhL,KAAKoL,GAAWrF,EAFf,GAE6BvF,EAAM0F,aAAalG,KAAKnP,MAE7Dia,IACAE,EAAOhL,KAAKoL,GAAWF,EAAWtU,QAAU/F,GAGhDma,EAAON,KAAKrP,KAAKxK,GAIboa,GAAcD,EAAOhL,KAAKoL,GAAW/P,KAAKxK,MAI9Coa,GACApB,GAASmB,EAAQhC,GAGjBlN,EAAQiP,WAAY,CACpB,IAAMM,EAAUpN,mB,sHAAAA,CAASA,MAAM+M,EAAOpK,OAAOhK,UAASwG,KAAI,iBAAM,MAChE4N,EAAOhL,KAAKjC,SAAQ,SAACiI,GACjBA,EAAMjI,SAAQ,SAACiC,EAAMnP,GACjBwa,EAAQxa,GAAGwK,KAAK2E,SAGxBgL,EAAOhL,KAAOqL,EAGlB,OAAOL,EC1EJ,SAASM,GAAYxG,EAAKC,GAC7B,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QA
ChF,OAAO,KAiBX,SAAS2E,EAAkBC,EAAIvL,EAAWwL,GACtCnK,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACPD,GAAW9L,EAAK3E,KAAK2K,GACzBuF,EAAUQ,IAAY,MASlC,OAjCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA2BtCwa,EAAkB7G,EAAK2G,GAAuB,GAC9CE,EAAkB9G,EAAK2G,GAAuB,GAEvC,IAAI5Y,GAAUmN,EAAMY,EAAQ,CAAExP,S,sPC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS2X,GAAkBlF,GACvB,OAAOA,EAAIgD,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAShD,SAAS+J,GAAKnF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMkO,EAAiBF,GAAkBlF,GAIzC,OAHiBoF,EAAevV,OACZuV,EAAevD,QAAO,SAACC,EAAKuD,GAAN,OAAevD,EAAMuD,IAAM,GAC/CjK,EAAkBK,KAG5C,OAAOL,EAAkBK,KAU7B,SAAS6J,GAAKtF,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAC5C,IAAMqO,EAAWJ,GAAInF,GACfnJ,EAAMmJ,EAAInQ,QAAU,EAC1B,OAAQ+G,OAAO4O,MAAMD,IAAaA,aAAoBnK,EAC7CA,EAAkBK,KAAO8J,EAAW1O,EAEjD,OAAOuE,EAAkBK,KAgG7B,IAAMgK,YACDrY,GAAM+X,IADL,KAED9X,GAAMiY,IAFL,KAGDhY,IAzFL,SAAc0S,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAKyT,IAAL,MAAAzT,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA+EvB,KAIDlO,IAzEL,SAAcyS,GACV,GAAIzH,EAAQyH,MAAUA,EAAI,aAAc9I,OAAQ,CAE5C,IAAMwO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe7V,OAAUqC,KAAK0T,IAAL,MAAA1T,KAAA,GAAYwT,IAAkBtK,EAAkBK,KAErF,OAAOL,EAAkBK,QA8DvB,KAKDjO,IAzDL,SAAgBwS,GACZ,OAAOA,EAAI,MAmDT,KAMDvS,IA/CL,SAAeuS,GACX,OAAOA,EAAIA,EAAInQ,OAAS,MAwCtB,KAODnC,IArCL,SAAgBsS,GACZ,OAAIzH,EAAQyH,GACDA,EAAInQ,OAERuL,EAAkBK,QA0BvB,KAQD9N,IAbL,SAAcqS,GACV,OAAO9N,KAAK2T,KAbhB,SAAmB7F,GACf,IAAI8F,EAAOR,GAAItF,GACf,OAAOsF,GAAItF,EAAI3J,KAAI,SAAA0P,GAAA,gBAAQA,EAAMD,EAAS,OAWzBE,CAAShG,OAIxB,GAWAiG,GAAqB7Y,G,0PCzCnB0J,GAjGFoP,G,WACF,aAAe,Y,4FAAA,SACXhY,KAAK4I,MAAQ
,IAAIoK,IACjBhT,KAAK4I,MAAMyK,IAAI,aAAc4E,IAE7B3b,OAAO4b,QAAQX,IAAQzO,SAAQ,SAAC3L,GAC5B,EAAKyL,MAAMyK,IAAIlW,EAAI,GAAIA,EAAI,O,oDAc/B,IAAK,UAAOwE,OACR,OAAO3B,KAAK4I,MAAMnM,IAAI,cAG1B,IAAI0b,EAAUA,UAAVA,8BAEJ,GAAuB,mBAAZA,EACPnY,KAAK4I,MAAMyK,IAAI,aAAc8E,OAC1B,CAEH,GADAA,EAAU1R,OAAO0R,IAC6B,IAA1C7b,OAAO0J,KAAKuR,IAAQpR,QAAQgS,GAG5B,MAAM,IAAI1H,MAAJ,WAAqB0H,EAArB,0BAFNnY,KAAK4I,MAAMyK,IAAI,aAAckE,GAAOY,IAK5C,OAAOnY,O,+BAmCD7D,EAAMgc,GAAS,WACrB,GAAuB,mBAAZA,EACP,MAAM,IAAI1H,MAAM,gCAMpB,OAHAtU,EAAOsK,OAAOtK,GACd6D,KAAK4I,MAAMyK,IAAIlX,EAAMgc,GAEd,WAAQ,EAAKC,aAAajc,M,mCAGvBA,GACN6D,KAAK4I,MAAMwK,IAAIjX,IACf6D,KAAK4I,MAAMyP,OAAOlc,K,8BAIjBA,GACL,OAAIA,aAAgBmc,SACTnc,EAEJ6D,KAAK4I,MAAMnM,IAAIN,O,KAgBfoc,IARO,QAHd3P,GAAQ,QAIJA,GAAQ,IAAIoP,IAETpP,I,+YC5Cf,SAAS4P,GAASC,EAAWtN,EAAUuN,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWtN,GAC7B,IAAMsE,EAAS,GAEToJ,EADaJ,EAAUrI,gBACCvE,eAY9B,OAVAvP,OAAO4b,QAAQW,GAAY/P,SAAQ,YAAW,IAAT3L,EAAS,WACtCgO,GAAYA,EAASxJ,QACU,IAA3BwJ,EAAShF,QAAQhJ,IACjBsS,EAAOrJ,KAAKjJ,GAGhBsS,EAAOrJ,KAAKjJ,MAIbsS,EAyCWqJ,CAAYL,EAAWtN,GACnC4N,EAhCV,SAAwBN,GAA0B,IAAfC,EAAe,uDAAJ,GACpC3C,EAAS,GAETiD,EADaP,EAAUrI,gBACD5E,aACtByM,EAAaM,GAAaU,iBAchC,OAZA3c,OAAO0J,KAAKgT,GAAUlQ,SAAQ,SAACoQ,GACU,iBAA1BR,EAASQ,KAChBR,EAASQ,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYb,GAAac,QAAQX,EAASQ,IAC5CE,EACArD,EAAOmD,GAAeE,GAEtBrD,EAAOmD,GAAejB,EACtBS,EAASQ,GAAenB,OAGzBhC,EAcYuD,CAAcb,EAAWC,GACtCzN,EAAawN,EAAUrI,gBACvBmJ,EAAgBtO,EAAWI,YAC3BmO,EAASvO,EAAW9O,KACpBsd,EAAe,GACfC,EAAa,GACb/N,EAAS,GACToH,EAAU,GACVhI,EAAO,GACT4O,SAGJrd,OAAO4b,QAAQqB,GAAezQ,SAAQ,YAAkB,cAAhB3L,EAAgB,KAAXN,EAAW,KACpD,IAAgC,IAA5B+b,EAAUzS,QAAQhJ,IAAe4b,EAAW5b,GAG5C,OAFAwO,EAAOvF,KAAKqD,EAAQ,GAAI5M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKlN,EAAUC,QACX+a,EAAWtT,KAAKjJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX6a,EAAarT,KAAKjJ,OAK9B,IAAIyc,EAAW,EACflN,EAAmB+L,EAAU9H,aAAa,SAAC/U,GACvC,IAAIie,EAAO,GACXJ,EAAa3Q,SAAQ,SAACgR,GAClBD,EAAUA,EAAV,IAAkBN,EAAcO,GAAG7I,aAAalG,KAAKnP,WAEnCsE,IAAlB6S,EAAQ8G,IACR9G,EAAQ8G,GAAQD,EAChB7O,EAAK3E,KAAK,IACVqT,EAA
a3Q,SAAQ,SAACgR,GAClB/O,EAAK6O,GAAUE,GAAKP,EAAcO,GAAG7I,aAAalG,KAAKnP,MAE3D8d,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAK6O,GAAUE,GAAK,CAACP,EAAcO,GAAG7I,aAAalG,KAAKnP,OAE5Dge,GAAY,GAEZF,EAAW5Q,SAAQ,SAACgR,GAChB/O,EAAKgI,EAAQ8G,IAAOC,GAAG1T,KAAKmT,EAAcO,GAAG7I,aAAalG,KAAKnP,UAM3E,IAAIme,EAAc,GACdC,EAAgB,kBAAMvB,EAAUnH,gBAcpC,OAbAvG,EAAKjC,SAAQ,SAACwM,GACV,IAAMvE,EAAQuE,EACdoE,EAAW5Q,SAAQ,SAACgR,GAChB/I,EAAM+I,GAAKf,EAAWe,GAAGxE,EAAIwE,GAAIE,EAAeD,SAGpDpB,GACAA,EAAkBsB,wBAClBN,EAAehB,GAGfgB,EAAe,IAAI/b,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMqd,IAEhDG,EC9HJ,SAASO,GAAmBrK,EAAKC,GACpC,IAIMqK,EAAkB7K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACe,EAAWE,GACf,IAAI+I,GAAc,EASlB,OARAD,EAAgBrR,SAAQ,SAACkL,GAGjBoG,IAFAjJ,EAAU6C,GAAWqG,gBACrBhJ,EAAU2C,GAAWqG,gBAAiBD,MAMvCA,GCjBR,SAASE,GAAOzK,EAAKC,GACxB,IAAMwG,EAAY,GACZ3K,EAAS,GACT4K,EAAgB,GAChBxL,EAAO,GACPoF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc9E,YACtCoL,EAAwBpG,EAAchF,YACtClP,EAAUgU,EAAchU,KAAxB,UAAsCkU,EAAclU,KAG1D,IAAKuO,EAAWmF,EAAI6G,eAAe7J,MAAM,KAAKmF,OAAQlC,EAAI4G,eAAe7J,MAAM,KAAKmF,QAChF,OAAO,KAgBX,SAAS2E,EAAmBC,EAAIvL,GAC5BqB,EAAmBkK,EAAGjG,aAAa,SAAC/U,GAChC,IAAMmV,EAAQ,GACV+F,EAAW,GACfP,EAAczN,SAAQ,SAACiO,GACnB,IAAMla,EAAQwO,EAAU0L,GAAY9F,aAAalG,KAAKnP,GACtDkb,OAAgBja,EAChBkU,EAAMgG,GAAcla,KAEnByZ,EAAUQ,KACX/L,EAAK3E,KAAK2K,GACVuF,EAAUQ,IAAY,MASlC,OAhCCjH,EAAI6G,eAAe7J,MAAM,KAAM/D,SAAQ,SAACkL,GACrC,IAAMzI,EAAQiL,EAAsBxC,GACpCrI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B4K,EAAcnQ,KAAKmF,EAAMI,SAASxP,SA0BtCwa,EAAkB9G,EAAK2G,GACvBG,EAAkB7G,EAAK2G,GAEhB,IAAI7Y,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASoe,GAAeC,EAAYC,EAAY1K,GACnD,OAAOH,EAAa4K,EAAYC,EAAY1K,GAAU,EAAOhB,EAAME,WAGhE,SAASyL,GAAgBF,EAAYC,EAAY1K,GACpD,OAAOH,EAAa6K,EAAYD,EAAYzK,GAAU,EAAOhB,EAAMG,Y,8PCFlDyL,G,WAUjB,WAAaxe,EAAM4O,EAAMY,EAAQvJ,I,4FAAQ,SACrCpC,KAAK7D,KAAOA,EACZ6D,KAAK2L,OAASA,EACd3L,KAAKoC,OAASA,EACdpC,KAAK+K,KAAO/K,KAAK4a,UAAU7P,G,6CAUpBA,GAAM,WACb,OAAOA,EAAK5C,KAAI,SAAA+K,GAAA,OAAS,EAAK9Q,OAAOuE,MAAMuM,EAAO,CAAEnT,OAAQ,EAAK4L,OAAO5L,gB,+PCX3D8a,G,WAQjB,WAAa5J,EAActE,
I,4FAAY,SACnC3M,KAAKiR,aAAeA,EACpBjR,KAAK2M,WAAaA,E,4CAclB,MAAM,IAAI8D,MAAM,yB,+BAUhB,OAAOzQ,KAAKiR,aAAatF,S,6BAUzB,OAAO3L,KAAKiR,aAAa9U,O,6BAUzB,OAAO6D,KAAKiR,aAAatF,OAAOC,O,gCAUhC,OAAO5L,KAAKiR,aAAatF,OAAOmP,U,oCAUhC,OAAO9a,KAAKiR,aAAatF,OAAOoP,c,oCAUhC,OAAO/a,KAAKiR,aAAatF,OAAOqP,aAAehb,KAAKiR,aAAatF,OAAOxP,O,6BASpE,WACE4O,EAAO,GAIb,OAHA2B,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjCmP,EAAK3E,KAAK,EAAK6K,aAAalG,KAAKnP,OAE9BmP,I,sCAUP,MAAM,IAAI0F,MAAM,0B,gCA9FhB,MAAM,IAAIA,MAAM,yB,8BAyIhB,MAvCgB,CACZwK,QAAS,GACTC,SAAUlb,KACVgU,UAHY,SAGF7X,GAEN,OADA6D,KAAKib,QAAQ9e,KAAOA,EACb6D,MAEX2L,OAPY,SAOLA,GAEH,OADA3L,KAAKib,QAAQtP,OAASA,EACf3L,MAEX+K,KAXY,SAWPA,GAED,OADA/K,KAAKib,QAAQlQ,KAAOA,EACb/K,MAEXiR,aAfY,SAeCA,GAET,OADAjR,KAAKib,QAAQhK,aAAeA,EACrBjR,MAEX2M,WAnBY,SAmBDA,GAEP,OADA3M,KAAKib,QAAQtO,WAAaA,EACnB3M,MAEXmb,MAvBY,WAwBR,IAAIlK,EAAe,KACnB,GAAIjR,KAAKib,QAAQhK,wBAAwB0J,GACrC1J,EAAejR,KAAKib,QAAQhK,iBACzB,KAAIjR,KAAKib,QAAQtP,SAAU3L,KAAKib,QAAQlQ,KAO3C,MAAM,IAAI0F,MAAM,4BANhBQ,EAAe,IAAI0J,GAAa3a,KAAKib,QAAQ9e,KACzB6D,KAAKib,QAAQlQ,KACb/K,KAAKib,QAAQtP,OACb3L,KAAKkb,SAAS9Y,UAKtC,OAAO,IAAIpC,KAAKkb,SAASjK,EAAcjR,KAAKib,QAAQtO,kB,+PCjK/CyO,G,stBAYb,OAHKpb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,4CAUZ,MAAM,IAAI5K,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAjCmB8P,I,0PCElBU,G,stBAYb,OAHKvb,KAAKqb,gBACNrb,KAAKqb,cAAgBrb,KAAKsb,uBAEvBtb,KAAKqb,gB,6BAUZ,OAAOrb,KAAKiR,aAAatF,OAAO6P,O,iCAUhC,OAAOxb,KAAKiR,aAAatF,OAAOwN,UAAYpB,K,qCAShC,IACJ0D,EAAiBzb,KAAKiR,aAAatF,OAAnC8P,aACR,OAAOA,aAAwBnD,SAAWmD,EAAe5Q,I,4CAUzD,MAAM,IAAI4F,MAAM,yB,sCAWhB,OAAOzQ,KAAK+K,W,GAhEiB8P,I,0PCLhBa,G,yKAQb,MAAM,IAAIjL,MAAM,2B,+PCJHkL,G,mtBAQVxa,GAQH,OALK+L,EAAkB0O,UAAUza,GAGpB+L,EAAkB2O,eAAe1a,GAFjCsF,OAAOtF,GAAK2a,W,GAZcJ,I,0PCC1BK,G,utBASb,OAAO3d,EAAiBC,c,4CAUL,WACbwb,EAAO,IAAImC,IACXC,EAAS,GAUf,OAPAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAGb+I,K,gCAIP,OAAO,IAAIN,O,GAnCsBP,I,0PCApBe,G,mtBAoBVhb,E,GAAiB,IAAVpB,EAA
U,EAAVA,OACN6D,SAKJ,GAHK5D,KAAKoc,OACNpc,KAAKoc,KAAO,IAAItc,EAAkBC,IAEjCmN,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIhB,EAAaH,KAAKoc,KAAK3T,cAActH,GACzCyC,EAASzD,EAAaA,EAAWoK,UAAY2C,EAAkBM,GAInE,OAAO5J,M,GAhC6B8X,I,0PCEvBW,G,YAQjB,WAAapL,EAActE,I,4FAAY,e,iKAAA,wDAC7BsE,EAActE,IADe,OAGnC,EAAK2P,eAAiB,KAHa,E,wXAahB,WACbzC,EAAO,IAAImC,IACXC,EAAS,GAYf,OARAvP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAChCie,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO7V,KAAK8M,OAIb+I,I,qDAWP,GAAIjc,KAAKsc,eACL,OAAOtc,KAAKsc,eAUhB,IAPA,IAAMC,EAAavc,KAAK+K,OAAO+J,QAAO,SAAA9K,GAAA,QAAUA,aAAgBkD,MAAoB8E,MAAK,SAAChP,EAAGO,GAAJ,OAAUP,EAAIO,KACjGiZ,EAAQD,EAAW5a,OACrB8a,EAAU/T,OAAOgU,kBACjBC,SACAC,SACAC,EAAiB,EAEZjhB,EAAI,EAAGA,EAAI4gB,EAAO5gB,IACvB+gB,EAAYJ,EAAW3gB,EAAI,IAC3BghB,EAAYL,EAAW3gB,MAEL+gB,IAIlBF,EAAUzY,KAAKyT,IAAIgF,EAASG,EAAYL,EAAW3gB,EAAI,IACvDihB,KAQJ,OALKA,IACDJ,EAAU,MAEdzc,KAAKsc,eAAiBG,EAEfzc,KAAKsc,iB,+BAUZ,OAAOtc,KAAKiR,aAAatF,OAAO5L,S,sCAUnB,WACPgL,EAAO,GACP+R,EAAa9c,KAAKD,SAaxB,OAXA2M,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GAErC,GAAIsR,EAAkB0O,UAAU1I,KAAY4J,GAAcpU,OAAOrH,SAAS6R,GAAS,CAE/E,IAAM6J,EAAc7P,EAAkB2O,eAAe3I,IAAUA,EAC/DnI,EAAK3E,KAAK2W,QAEVhS,EAAK3E,KAAKtG,EAAkBwG,SAAS4M,EAAO4J,OAG7C/R,K,gCAIP,OAAO,IAAIoR,O,GAjHmBf,I,0PCHjB4B,G,mtBAQV7b,GAEHA,EAAMsF,OAAOtF,GACb,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAK7ByC,EAASsJ,EAAkB2O,eAAe1a,OALP,CACnC,IAAI8b,EAAU9b,EAAIoH,MALR,2DAMV3E,EAASqZ,EAAavU,OAAOwU,WAAWD,EAAQ,IAAvC,IAA8CvU,OAAOwU,WAAWD,EAAQ,IAC9D/P,EAAkBM,GAIzC,OAAO5J,M,GApB2B8X,I,0PCArByB,G,muBASb,IAAMC,EAAUpd,KAAKiR,aAAatF,OAAO0R,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQzb,OAAS,M,6BAU7C,OAAO3B,KAAKiR,aAAatF,OAAO0R,Q,gCAIhC,OAAO,IAAIL,O,GAxBiB5B,I,0PCAfkC,G,mtBAQVnc,GACH,IAAIyC,SAEJ,GAAKsJ,EAAkB0O,UAAUza,GAI7ByC,EAASsJ,EAAkB2O,eAAe1a,OAJP,CACnC,IAAIC,EAAY8b,WAAW/b,EAAK,IAChCyC,EAAS8E,OAAO4O,MAAMlW,GAAa8L,EAAkBM,GAAKpM,EAI9D,OAAOwC,M,GAjB+B8X,I,0PCGzB6B,G,utBASb,OAAO/e,EAAeC,a,4CAUH,WACfgZ,EAAM/O,OAAOgU,kBA
CbhF,EAAMhP,OAAO8U,kBAiBjB,OAdA9Q,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMsX,EAAQ,EAAKjC,aAAalG,KAAKnP,GACjCsX,aAAiBhG,IAIjBgG,EAAQuE,IACRA,EAAMvE,GAENA,EAAQwE,IACRA,EAAMxE,OAIP,CAACuE,EAAKC,M,gCAIb,OAAO,IAAI4F,O,GA1CqB/B,I,0PCNlCkC,G,WACF,c,4FAAc,SACVzd,KAAK0d,WAAa,IAAI1K,I,qDAGR8H,EAAS6C,GAEvB,OADA3d,KAAK0d,WAAWrK,IAAIyH,EAAS6C,GACtB3d,O,0BAGP4L,GACA,OAAO5L,KAAK0d,WAAWtK,IAAIxH,K,0BAG3BA,GACA,OAAO5L,KAAK0d,WAAWjhB,IAAImP,O,KAI7BgS,GAAwB,SAAChV,GAC3BA,EACiBiV,kBAAkBzf,EAAiBC,YAAa0d,IAChD8B,kBAAkBzf,EAAiBE,SAAU+d,IAC7CwB,kBAAkBzf,EAAiBG,OAAQ4e,IAC3CU,kBAAkBrf,EAAeC,WAAY8e,KAanDO,GAVQ,WACnB,IAAIlV,EAAQ,KAMZ,OAAOA,IAJHA,EAAQ,IAAI6U,GACZG,GAAsBhV,GACfA,GALQ,GCgChB,SAASmV,GAAaC,EAAYrS,EAAQsS,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQtc,SACrBsc,EAAUtS,EAAOxD,KAAI,SAAA6B,GAAA,OAAQA,EAAK7N,SAGtC8hB,EAAQnV,SAAQ,SAACqV,EAAQviB,GACrBsiB,EAAWC,GAAUviB,KAGlB+P,EAAOxD,KAAI,SAAA6B,GAAA,OAnEtB,SAAyBe,EAAMY,GAG3B,OAFAZ,EAAOA,GAAQ,GAEX+S,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALV,MAK0B5B,EAAKpJ,OAAS,IAC9BwZ,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACApK,UAAUrI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WANV,MAM0B5B,EAAKpJ,OAAS,IAC9BwZ,QAgDSkD,CAAgBL,EAAWE,EAAWlU,EAAK7N,OAAQ6N,MC3ElE,QACX8S,WAAY/e,EAAWI,M,0PCANmgB,G,WACjB,WAAY1S,I,4FAAM,SACd5L,KAAKue,MAAQ3S,E,6CAQb,MAAM,IAAI6E,MAAM,qC,2BAJhB,OAAOzQ,KAAKue,U,KCThBC,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBC,GACvB,OAAO,IAAIxG,SAAS,IAAK,WAAawG,EAAQ3W,KAAI,SAAShM,EAAMP,GAC/D,OAAOmjB,KAAKC,UAAU7iB,GAAQ,OAASP,EAAI,OAC1CqH,KAAK,KAAO,KAWjB,SAASgc,GAAaC,GACpB,IAAIC,EAAY7iB,OAAOY,OAAO,MAC1B4hB,EAAU,GAUd,OARAI,EAAKpW,SAAQ,SAASwM,GACpB,IAAK,IAAI8J,KAAU9J,EACX8J,KAAUD,GACdL,EAAQ1Y,KAAK+Y,EAAUC,GAAUA,MAKhCN,EAGT,SAASjf,GAAIhD,EAAOwiB,GAClB,IAAI1hB,EAAId,EAAQ,GAAI8E,EAAShE,EAAEgE,OAC/B,OAAOA,EAAS0d,EAAQ,IAAIrW,MAAMqW,EAAQ1d,EAAS,GAAGsB,KAAK,GAAKtF,EAAIA,EAStE,SAAS2hB,GAAW3f,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAK4f,cACbC,EAAU7f,EAAK8f
,gBACfC,EAAU/f,EAAKggB,gBACfC,EAAejgB,EAAKkgB,qBACxB,OAAOvI,MAAM3X,GAAQ,iBAXHwE,EAYDxE,EAAKmgB,kBAXR,EAAI,IAAMjgB,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAKogB,cAAgB,EAAG,GAAK,IAAMlgB,GAAIF,EAAKqgB,aAAc,IAC1GJ,EAAe,IAAM/f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAAM7f,GAAI+f,EAAc,GAAK,IACnHF,EAAU,IAAM7f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IAAM3f,GAAI6f,EAAS,GAAK,IAChFF,GAAWhd,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAI2f,EAAS,GAAK,IACjE,IAGO,gBAASS,GACtB,IAAIC,EAAW,IAAI9f,OAAO,KAAQ6f,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAU/f,EAAM+T,GACvB,IAIIvX,EAJAoiB,EAAO,GACPoB,EAAIhgB,EAAKqB,OACT4e,EAAI,EACJljB,EAAI,EAEJmjB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAASpa,IACP,GAAIma,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAI5iB,EAAUK,EAAPykB,EAAIH,EACX,GAAIjgB,EAAK8f,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAKhgB,EAAK8f,WAAWG,KAAO7B,IAASpe,EAAK8f,aAAaG,KAAO7B,KAI3E,OAHK9iB,EAAI2kB,IAAMD,EAAGE,GAAM,GACdvkB,EAAIqE,EAAK8f,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9CxkB,IAAM2iB,KAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,GACpEjgB,EAAK8T,MAAMsM,EAAI,EAAG9kB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOggB,EAAID,GAAG,CACZ,IAAKrkB,EAAIqE,EAAK8f,WAAWxkB,EAAI2kB,QAAU5B,GAAS8B,GAAM,OACjD,GAAIxkB,IAAM2iB,GAAU6B,GAAM,EAAUngB,EAAK8f,WAAWG,KAAO5B,MAAW4B,OACtE,GAAItkB,IAAMkkB,EAAW,SAC1B,OAAO7f,EAAK8T,MAAMsM,EAAG9kB,GAIvB,OAAO4kB,GAAM,EAAMlgB,EAAK8T,MAAMsM,EAAGJ,GAGnC,IA7BIhgB,EAAK8f,WAAWE,EAAI,KAAO3B,MAAW2B,EACtChgB,EAAK8f,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjCxjB,EAAIuJ,OAAaoY,IAAK,CAE5B,IADA,IAAInJ,EAAM,GACHxY,IAAM0hB,IAAO1hB,IAAM2hB,IAAKnJ,EAAIlP,KAAKtJ,GAAIA,EAAIuJ,IAC5CgO,GAA4B,OAAtBiB,EAAMjB,EAAEiB,EAAKjY,OACvB6hB,EAAK9Y,KAAKkP,GAGZ,OAAO4J,EAGT,SAASyB,EAAczB,EAAMJ,GAC3B,OAAOI,EAAK/W,KAAI,SAASmN,GACvB,OAAOwJ,EAAQ3W,KAAI,SAASiX,GAC1B,OAAOwB,EAAYtL,EAAI8J,OACtBnc,KAAKgd,MAkBZ,SAASY,EAAUvL,GACjB,OAAOA,EAAInN,IAAIyY,GAAa3d,KAAKgd,GAGnC,SAASW,EAAY/jB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiB+C,KAAO0f,GAAWziB,GACnCqjB,EAASY,KAAKjkB,GAAS,IAAM,IAAOA,E
AAM0D,QAAQ,KAAM,MAAU,IAClE1D,EAGR,MAAO,CACL8J,MA5FF,SAAerG,EAAM+T,GACnB,IAAI0M,EAASjC,EAASI,EAAOmB,EAAU/f,GAAM,SAASgV,EAAK1Z,GACzD,GAAImlB,EAAS,OAAOA,EAAQzL,EAAK1Z,EAAI,GACrCkjB,EAAUxJ,EAAKyL,EAAU1M,EAtD/B,SAAyByK,EAASzK,GAChC,IAAI/W,EAASuhB,GAAgBC,GAC7B,OAAO,SAASxJ,EAAK1Z,GACnB,OAAOyY,EAAE/W,EAAOgY,GAAM1Z,EAAGkjB,IAmDMkC,CAAgB1L,EAAKjB,GAAKwK,GAAgBvJ,MAGzE,OADA4J,EAAKJ,QAAUA,GAAW,GACnBI,GAuFPmB,UAAWA,EACXtgB,OA5BF,SAAgBmf,EAAMJ,GAEpB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrC,CAACJ,EAAQ3W,IAAIyY,GAAa3d,KAAKgd,IAAYgB,OAAON,EAAczB,EAAMJ,IAAU7b,KAAK,OA2B5Fie,WAxBF,SAAoBhC,EAAMJ,GAExB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrCyB,EAAczB,EAAMJ,GAAS7b,KAAK,OAuBzCke,WApBF,SAAoBjC,GAClB,OAAOA,EAAK/W,IAAI0Y,GAAW5d,KAAK,SC1IhCme,GAAMC,GAAI,KCAVC,IDEkBF,GAAIza,MACAya,GAAIf,UACPe,GAAIrhB,OACAqhB,GAAIF,WACJE,GAAID,WCNrBE,GAAI,OAEQC,GAAI3a,MACA2a,GAAIjB,UACPiB,GAAIvhB,OACAuhB,GAAIJ,WACJI,GAAIH,WC8ChBI,OAnCf,SAAgBzP,EAAKnG,EAAQ9E,GACzB,IAAKmC,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAEpB,IAGM+Q,EAAe7V,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QACzD0K,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,GAGuB7a,GAE3C,IAAMiY,EAAU,GACV1Y,EAAOub,EAAY7C,GAErBb,EAAUuD,EACV3a,EAAQ6a,iBAGRzD,EAAUnM,EAAItK,OAAO,EAAG,GAAG,IAG/B,IAAMoa,EAAY3D,EAAQtK,QAAO,SAACC,EAAKiO,EAAGjmB,GAAT,OAC7BU,OAAO8Q,OAAOwG,G,EAAYhY,G,EAAJimB,K,EAAtB,I,sGACD,IAUH,OARA/P,EAAIhJ,SAAQ,SAACD,GACT,IAAM0C,EAAQ,GAKd,OAJAiW,EAAa1Y,SAAQ,SAACgZ,GAClB,IAAMC,EAAYH,EAAUE,GAC5BvW,EAAMnF,KAAKyC,EAAOkZ,OAEf3b,eAAQmF,MAEZ,CAACiW,EAAc1C,IChBXkD,OAXf,SAAiB7X,EAAKwB,EAAQ9E,GAK1BA,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBsU,gBAAgB,EAChBO,eAAgB,KAEuBpb,GAE3C,IAAMwa,EAAMa,GAAMrb,EAAQob,gBAC1B,OAAOV,GAAOF,EAAIhB,UAAUlW,GAAMwB,EAAQ9E,I,0PC5BzBsb,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJpkB,EAAWE,U,0WAGb8M,EAAMY,EAAQ9E,GAClB,OAAOmb,GAAOjX,EAAMY,EAAQ9E,O,GANYyX,ICqDjC8D,OA7Bf,SAAmBtQ,EAAKnG,GACpB,IAAK3C,MAAMqB,QAAQsB,GACf,MAAM,IAAI8E,MAAM,iDAGpB,IAAM0N,EAAS,GACXviB,EAAI,EACJymB,SACEvD,EAAU,GACV1Y,EAAOub,EAAY7C,GACnBwD,EAAmB3W,EAAOxD,KAAI,SAAAsZ,GAAA,OAAcA,EAAWtlB,QAgB7D,OAdA2V,EAAIhJ,SAAQ,SAACk
B,GACT,IAAMnB,EAAS,GACfyZ,EAAiBxZ,SAAQ,SAAC2Y,GAClBA,KAActD,EACdkE,EAAiBlE,EAAOsD,IAExBtD,EAAOsD,GAAc7lB,IACrBymB,EAAiBzmB,EAAI,GAEzBiN,EAAOwZ,GAAkBrY,EAAKyX,MAElCrb,eAAQyC,MAGL,CAACvM,OAAO0J,KAAKmY,GAASW,I,0PClDZyD,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJxkB,EAAWC,Y,0WAGb+M,EAAMY,EAAQ9E,GAClB,OAAOub,GAASrX,EAAMY,EAAQ9E,O,GANKyX,I,0PCAtBkE,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJzkB,EAAWG,U,0WAGb6M,EAAMY,EAAQ9E,GAClB,OAAO0a,GAAOxW,EAAMY,EAAQ9E,O,GANWyX,ICmBhCmE,OAXf,SAAe1X,EAAMY,EAAQ9E,GACzB,IAAM6b,EAAa,CAAEN,YAAUJ,UAAQT,WACjCzE,EAAahS,EAAiBC,GAEpC,IAAK+R,EACD,MAAM,IAAIrM,MAAM,mCAGpB,OAAOiS,EAAW5F,GAAY/R,EAAMY,EAAQ9E,I,0PChB3B8b,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJ5kB,EAAWI,O,0WAGb4M,EAAMY,EAAQ9E,GAClB,OAAO1I,GAAK4M,EAAMY,EAAQ9E,O,GANayX,I,6PCEzCsE,G,WACF,c,4FAAc,SACV5iB,KAAK4I,MAAQ,IAAIoK,IACjBhT,KAAK0iB,WAAW1iB,KAAK6iB,yB,2DAIrB,MAAO,CACH,IAAIV,GACJ,IAAIK,GACJ,IAAID,GACJ,IAAII,M,mCASgB,WAAjBD,EAAiB,uDAAJ,GAEpB,OADAA,EAAW5Z,SAAQ,SAAAga,GAAA,OAAa,EAAKla,MAAMyK,IAAIyP,EAAUlX,KAAMkX,MACxD9iB,KAAK4I,Q,+BAQPka,GACL,OAAIA,aAAqBxE,IACrBte,KAAK4I,MAAMyK,IAAIyP,EAAUlX,KAAMkX,GACxB9iB,MAEJ,O,iCASA8iB,GAEP,OADA9iB,KAAK4I,MAAMyP,OAAOyK,EAAUlX,MACrB5L,O,0BAGP7D,GACA,OAAI6D,KAAK4I,MAAMwK,IAAIjX,GACR6D,KAAK4I,MAAMnM,IAAIN,GAEnB,S,KAeA4mB,GAVS,WACpB,IAAIna,EAAQ,KAMZ,OAAOA,IAHHA,EAAQ,IAAIga,IAJI,G,ioBChDxB,SAASI,GAAsBna,EAAQqI,EAAe+R,EAASrnB,GAC3D,IAAMsnB,EAAO,GADiD,uBAG9D,YAA2Bra,EAAOqP,UAAlC,+CAA6C,yBAAjC/a,EAAiC,KAA5BoO,EAA4B,KACzC2X,EAAK3X,EAAMpP,QAAU,IAAI6P,EAAMkF,EAAc/T,GAAKvB,GAAIqnB,EAAQ9lB,GAAKvB,GAAI2P,IAJb,6EAM9D,OAAO2X,EAGJ,SAAS9R,GAAiBvI,GAC7B,IAAMqa,EAAO,GAEb,IAAK,IAAM/lB,KAAO0L,EACdqa,EAAK/lB,GAAO,IAAI6O,EAAMnD,EAAO1L,GAAK+O,eAAgBrD,EAAO1L,GAAK8O,SAAU9O,GAE5E,OAAO+lB,EAGJ,IAAMC,GAAe,SAAC,EAA6BC,EAAmBC,GAAmB,cAAlE1W,EAAkE,KAAtDiJ,EAAsD,KACxF0N,EAAS1N,EAAcjU,OAASiU,EAAc/I,MAAM,KAAO,GAC3D0W,EAAkBH,EAAkB/X,YACpCmY,EAAYF,EAAOnb,KAAI,SAAAsb,GAAA,OfGxB,SAAoCxS,EAActE,GAAY,IACzDhB,EAAWsF,EAAXtF,OAER,OAAImS,GAAc1K,IAAIzH,EAAOmP,SAClBgD,GAAcrhB,IAAIkP,EAAOmP,SACfsD,QACAnN,aAAaA,GACbtE,WAAWA
,GACXwO,QAEd2C,GACUrhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrF+f,QACAnN,aAAaA,GACbtE,WAAWA,GACXwO,QelBkBuI,CAA2BH,EAAgBE,GAAMxS,aAActE,MAClG,OAAO1B,EAAWC,gBAAgBsY,EAAWH,IAGpCM,GAA2B,SAACC,EAAOC,GAAuC,IACzC,EADa1W,EAA4B,uDAAnB,GAAI2W,EAAe,aAC/ED,IAActV,EAAeI,SAC7BiV,EAAMG,YAAYpiB,OAAS,GAC3B,EAAAiiB,EAAMG,aAAY3d,KAAlB,WAA0B0d,KAE1BF,EAAMG,YAAY3d,KAAK,CACnB4d,GAAIH,EACJI,KAAM9W,EACN+W,SAAUJ,KAITK,GAA4B,SAACC,EAAUC,GAAU,OAC1D,EAAAA,EAAMC,qBAAoBle,KAA1B,WAAkCge,EAASE,qBAA3C,UAAmEF,EAASL,gBAGnEQ,GAAqB,SAACH,EAAUR,EAAOC,GAAuC,IAA5B1W,EAA4B,uDAAnB,GAAI2W,EAAe,aACvFH,GAAyBC,EAAOC,EAAW1W,EAAQ2W,GACnDK,GAA0BC,EAAUR,IAGlCY,aACD3lB,EAAcC,OAAS,CACpB2lB,UAAW,CAAC,cACZC,SAAU,EAAC,GAAM,KAHnB,MAKD7lB,EAAcE,QAAU,CACrB0lB,UAAW,CAAC,oBACZC,SAAU,EAAC,GAAO,KAPpB,MASD7lB,EAAcG,IAAM,CACjBylB,UAAW,CAAC,aAAc,oBAC1BC,SAAU,EAAC,GAAM,KAXnB,IAeAC,GAAqB,SAAChY,EAAY/Q,EAAGgpB,GACvC,IAA2B,IAAvBA,GAA4BhpB,IAAOgpB,EAAoB,EAAI,CAC3D,IAAMC,EAAKlY,EAAWhL,OAAS,EAE/BgL,EAAWkY,GAASlY,EAAWkY,GAAIhY,MAAM,KAAK,GAA9C,IAAoDjR,OAEpD+Q,EAAWvG,KAAX,GAAmBxK,IAIdkpB,GAA2B,SAACnY,EAAYoY,EAAShoB,GAC1D,IAEMioB,EAAgB,GAChBC,EAAgB,GAJ6C,KAM9BT,GAAcznB,GAAM2nB,SANU,GAM5DQ,EAN4D,KAM9CC,EAN8C,KAanE,OALAzY,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMwpB,EAAgBL,EAAQnpB,GAC9BwpB,GAAiBF,GAAgBP,GAAmBK,EAAeppB,GAT5C,IAUtBwpB,GAAiBD,GAAgBR,GAAmBM,EAAerpB,GAT7C,MAWpB,CACH+Q,WAAYqY,EAAc/hB,KAAK,KAC/BoiB,iBAAkBJ,EAAchiB,KAAK,OAKhCqiB,GAA0B,SAAC3Y,EAAYoY,EAAShoB,EAAM0c,EAAcF,GAC7E,IAAIqL,EAAoB,GAClBW,EAAkB,GAClBC,EAAe,GAyBrB,OAvBA9Y,EAAmBC,GAAY,SAAC/Q,GAC5B,GAAImpB,EAAQnpB,GAAI,CACZ,IAAIie,EAAO,GAEP4L,EAAe,CAAEzf,KAAM,IAE3ByT,EAAa3Q,SAAQ,SAACgR,GAClB,IAAM/O,EAAOwO,EAAcO,GAAG7I,aAAalG,KAAKnP,GAChDie,EAAUA,EAAV,IAAkB9O,EAClB0a,EAAazf,KAAK8T,GAAK/O,UAGG7K,IAA1BqlB,EAAgB1L,KAChB0L,EAAgB1L,GAAQ,GACxB+K,EAAkB/K,IAAS,EAC3B2L,EAAa3L,GAAQ4L,GAGzBd,GAAmBY,EAAgB1L,GAAOje,EAAGgpB,EAAkB/K,IAC/D+K,EAAkB/K,GAAQje,MAI3B,CACH2pB,kBACAC,iBAKKE,GAAe,SAACC,EAAUC,EAAUzY,EAAQiX,EAAUyB,GAC/D,IAAI9L,EAAc,GACdC,EAAgB,kBAAMoK,EAAS9S,gBAC3BvU,EAASoQ,EAATpQ,KACF
4P,EAAagZ,EAAShV,YACtB9H,EAAS8c,EAASG,uBAAuBjd,OACzCkd,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAChD8U,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAShD,OAAO8a,EAASlZ,GAPS,SAAAzK,GAAA,OAAS0jB,EAC9B5C,GAAqBna,EAAQkd,EAAqBC,EAAe9jB,GACjEA,EACA8X,EACAD,KAG0Chd,IAGrCkpB,GAAqB,SAACrC,GAC/B,IAAM+B,EAAW/B,EAAMsC,OAAM,GACvB9C,EAAoBQ,EAAMkC,uBAShC,OARAH,EAASjP,eAAiB0M,EAAkBva,OAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK,KAG3EmgB,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KACnCia,EAAS1L,wBAAwBkM,wBAE1BR,GAGLS,GAAS,SAACtU,EAAK/G,EAAMsb,GAGvB,IAFA,IAAIlpB,EAAMkpB,EAAGvU,EAAK/G,EAAM,GAEfnP,EAAI,EAAG+M,EAAMmJ,EAAInQ,OAAQ/F,EAAI+M,EAAK/M,IACvCuB,EAASA,EAAT,IAAgBkpB,EAAGvU,EAAK/G,EAAMnP,GAElC,OAAOuB,GAGEmpB,GAAyB,SAAC1C,EAAO2C,GAA4B,IAAhBpZ,EAAgB,uDAAP,GAC3DqZ,EAAM,GACJ3C,EAAY1W,EAAO0W,WAAaxU,EAChCoX,EAAkBtZ,EAAOsZ,kBAAmB,EAC5CC,EAAcT,GAAmBrC,GACjC+C,EAAoBD,EAAYE,kBAKlCJ,EAHCD,EAAW5kB,OAGN4kB,EAAWpe,KAAI,SAAA0e,GAAA,OACbC,SACEjS,GAF0B4D,EAqCjCoO,GAnC2BE,UACpBC,EAAevO,EAAUmO,kBACzB/N,EAAavc,OAAO0J,KAAKyS,EAAUrI,gBAAgBvE,gBACpDiJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBM,EAAOpO,EAAWlX,OAClBulB,EAAUrO,EAAW1Q,KAAI,SAAAjM,GAAA,OAC3B8qB,EAAa9qB,GAAGgG,SACd8W,EAAW1c,OAAO0J,KAAKyS,EAAUrI,gBAAgB5E,cAClDsJ,QAAO,SAAA5Y,GAAA,OAAKA,KAAKyqB,KAChBQ,EAAc1O,EAAUrI,gBAAgB/E,YACxCN,EAAO8J,EAAQ9J,KACfkR,EAASjD,EAASrF,QAAO,SAACC,EAAKwT,GAEjC,OADAxT,EAAIwT,GAAKD,EAAYC,GAAGnL,SACjBrI,IACR,IACGyT,EAAY,GAElBP,EAAQ,SAAChV,EAAKwD,EAAKzB,GAAX,OAAmByB,EAAIxD,EAAI+B,KAC/BoT,GACAlc,EAAKjC,SAAQ,SAACwM,GACV,IAAMnY,EAAMipB,GAAOc,EAAS5R,EAAKwR,GACjCO,EAAUlqB,GAAO,KAIzB2pB,EAAQ,SAAChV,EAAKjJ,EAAQgL,GAAd,OAAsBhL,EAAOiJ,EAAI+B,IAAMwG,eACxCtP,EAAKpJ,OAAS,SAACkH,GAClB,IAAMye,GAAUL,GAAOI,EAAUjB,GAAOvN,EAAYhQ,EAAQie,IAE5D,OAAIL,EACOzN,EAASuO,OAAM,SAAAhc,GAAA,OAAS1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,IACxE1C,EAAO0C,GAAO8O,eAAiB4B,EAAO1Q,GAAO,OAAO+b,EAErDA,GACP,kBAAM,GApCqB,IAAC7O,EAC5BqO,EACEjS,EACAmS,EACAnO,EAEAoO,EACAC,EAEAlO,EAEAmO,EACApc,EACAkR,EAIAoL,KAnBJ,CAAC,kBAAM,IAqDjB,OAVIxD,IAAcxU,EACEqX,EAAYc
,QAAO,SAAA3e,GAAA,OAAU2d,EAAIe,OAAM,SAAAlB,GAAA,OAAMA,EAAGxd,QAAU,CACtE4e,WAAW,IAGCf,EAAYc,QAAO,SAAA3e,GAAA,OAAU2d,EAAIkB,MAAK,SAAArB,GAAA,OAAMA,EAAGxd,QAAU,CACrE4e,WAAW,KA+CVE,GAAuB,SAAChC,EAAUhZ,EAAYyX,EAAUwD,EAAchC,GAC/ED,EAAShV,YAAchE,EACvBgZ,EAAS1L,wBAAwBkM,wBACjC5B,GACIH,EACAuB,EACApX,EAAeC,OACd,CAAErB,OAAQya,GACThC,IA+BGiC,GAAmB,SAACzD,EAAU0D,EAAW3a,EAAQ4a,GAC1D,IAAMC,EAAS5D,EAAS8B,MAAM/Y,EAAOsa,WACjCQ,EAAgBH,EAiBpB,OAhBI3a,EAAOpQ,OAAS8B,EAAcE,UAC9BkpB,EAAgBF,EAAUjT,QAAO,SAAAd,GAAA,OAA+C,IAAlC8T,EAAU3hB,QAAQ6N,OAIpEgU,EAAOtR,eAAiBuR,EAAchlB,KAAK,KAC3C+kB,EAAO/N,wBAAwBkM,wBAE/B5B,GACIH,EACA4D,EACAzZ,EAAeE,QACf,CAAEqZ,YAAW3a,SAAQ+a,gBAAiBD,GACtC,MAGGD,GAIEG,GAAmB,SAAC/D,EAAUgE,EAAcjb,EAAQ4a,GAAjC,OAC5BK,EAAajgB,KAAI,SAAAkgB,GAAA,OACbR,GAAiBzD,EAAUiE,EAAYlb,EAAQ4a,OAE1CO,GAAqB,SAAC7G,GAO/B,IALAA,EAAahY,EAAQ,GAAIgY,IACT7V,OACZ6V,EAAW7V,KAAOlN,EAAUE,YAG3B6iB,EAAW3G,QACZ,OAAQ2G,EAAW7V,MACnB,KAAKlN,EAAUC,QACX8iB,EAAW3G,QAAUtc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX6iB,EAAW3G,QAAU1c,EAAiBC,YAK9C,OAAOojB,GAcE8G,GAA4B,SAAA5c,GAAA,OAAUA,EAAOxD,KAAI,SAACsZ,GAG3D,OAd8B,SAACA,GAAe,IACtC7V,EAAwB6V,EAAxB7V,KAAMkP,EAAkB2G,EAAlB3G,QAAS3e,EAASslB,EAATtlB,KACvB,GAAIyP,IAASlN,EAAUE,WAAagN,IAASlN,EAAUC,QAKnD,MAAM,IAAI8R,MAAJ,wCAAkD7E,EAAlD,aAAmEzP,EAAnE,UAJN,IAAK2hB,GAAc1K,IAAI0H,GACnB,MAAM,IAAIrK,MAAJ,mDAA6DqK,EAA7D,aAAiF3e,EAAjF,UASdqsB,CADA/G,EAAa6G,GAAmB7G,IAEzBA,MAeEgH,GAAa,SAACC,EAAU3d,EAAMY,EAAQ9E,GAC/C8E,EAAS4c,GAA0B5c,GACnC9E,EAAUvK,OAAO8Q,OAAO9Q,OAAO8Q,OAAO,GAAIub,IAAgB9hB,GAC1D,IAAMic,EAAYC,GAAetmB,IAAIoK,EAAQiW,YAG7C,IAAKgG,EACD,MAAM,IAAIrS,MAAJ,mCAA6C5J,EAAQiW,WAArD,WAPiD,MAU3BgG,EAAU/B,QAAQhW,EAAMY,EAAQ9E,GAVL,UAUpDsX,EAVoD,KAU5CjN,EAV4C,MAZ/B,SAACvF,EAAQid,GACrCjd,EAAO7C,SAAQ,SAAC2Y,GACZ,IAAMoH,EAAcpH,EAAWqH,GAC/B,GAAKD,EAAL,CAEA,IAAMhV,EAAM+U,EAAWziB,QAAQsb,EAAWtlB,MAC1CysB,EAAW/U,GAAOgV,EAClBpH,EAAWtlB,KAAO0sB,SACXpH,EAAWqH,OAetBC,CAAiBpd,EAAQwS,GACzB,IAAMhT,EAAW4S,GAAa7M,EAAevF,EAAQwS,GAG/C6K,EAAY/d,EAAWC,gBAAgBC,EAAUtE,EAAQ1K,MAC/DusB,EAASO,mBAAqBD,EAG9BN,EAAS/X,
YAAcO,EAAcvP,QAAUuP,EAAc,GAAGvP,OAAzC,MAAuDuP,EAAc,GAAGvP,OAAS,GAAM,GAG9G,IAAMunB,EAAe,GACbrgB,EAAWmgB,EAAXngB,OACFmd,EAAgBnd,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAC1Cgb,EAAsBld,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,mBAQtD,OAPAxE,EAAmBgc,EAAS/X,aAAa,SAAC/U,GACtCstB,EAAattB,GAAKonB,GAAqBna,EAAQkd,EAAqBC,EAAepqB,MAEvFotB,EAAUG,oBAAsBD,EAEhCR,EAAShS,eAAkB/K,EAAOxD,KAAI,SAAA2R,GAAA,OAAKA,EAAE3d,QAAO8G,OACpDylB,EAASU,YAAcviB,EAAQiW,aAAe/e,EAAWI,KAAO2M,EAAiBC,GAAQlE,EAAQiW,WAC1F4L,GAGExU,GAAgB,SAACvI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOhK,SAAU/F,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHA,KAAMoP,EACNK,KAAMD,EAAO/P,GAAGkf,SAAWnP,EAAO/P,GAAGgQ,KACrC1J,MAAOtG,GAInB,OAAO,MA+BLytB,GAAgC,SAACxC,EAAWpO,GAC9C,IAAM6Q,EAAc7Q,EAAU8Q,iBAC1BC,EAAiB3C,EAerB,OAbAyC,EAAYxgB,SAAQ,SAAC2gB,GACjB,GAAKA,EAAL,CADgC,IAMjB,EANiB,EAhCF,SAACA,GACnC,IAAIC,EAAS,GACT7F,SAEJ,OADAA,EAAY4F,EAAWzF,IAEvB,KAAKzV,EAAeC,OAChBkb,EAAS,CAACD,EAAWvF,UACrB,MACJ,KAAK3V,EAAeE,QAChBib,EAAS,CAACD,EAAWxF,KAAKiE,iBAC1B,MACJ,KAAK3Z,EAAeO,KAChB4a,EAAS,CAACD,EAAWvF,UACrB,MACJ,KAAK3V,EAAeG,QAChBmV,EAAY,UACZ6F,EAAS,CAACD,EAAWxF,KAAK0F,cAAc9c,MAAM,KAAM4c,EAAWvF,UAC/D,MACJ,QACIL,EAAY,KAGhB,MAAO,CACHA,YACA6F,UAa8BE,CAAuBH,GAA7C5F,EALwB,EAKxBA,UAAW6F,EALa,EAKbA,OACnB,GAAI7F,EACA2F,GAAiB,EAAAA,GAAe3F,GAAf,WAA6B6F,GAA7B,QAAqC,CAClDjC,WAAW,UAKhB+B,GAWLK,GAAuB,SAAvBA,EAAwBpR,EAAWoO,GAA8C,IAAnC1Z,EAAmC,uDAA1B,GAAI2c,EAAsB,uDAAP,GACtEC,EAAqBD,EAAaC,mBAClCC,EAAgBF,EAAaE,eAAiB,GAEpD,GAAIvR,IAAcsR,EAAlB,CAIA,IAAME,GAAYD,EAAcroB,SAA+C,IAAtCqoB,EAAc7jB,QAAQsS,GAE/DwR,GAAaxR,EAAUyR,kBAAkBrD,EAAW1Z,GAEpD,IAAMgd,EAAW1R,EAAU2R,UAC3BD,EAASrhB,SAAQ,SAACuhB,GACd,IAAMb,EAAiBH,GAA8BxC,EAAWwD,GAChER,EAAqBQ,EAAOb,EAAgBrc,EAAQ2c,QAI/CQ,GAAsB,SAAC1G,GAChC,KAAOA,EAAM2G,SAAW3G,EAAMG,YAAYyG,MAAK,SAAAtuB,GAAA,OAAKA,EAAE8nB,KAAOzV,EAAeG,YACxEkV,EAAQA,EAAM2G,QAElB,OAAO3G,GAGE6G,GAAmB,SAAC7G,GAC7B,KAAOA,EAAM2G,SACT3G,EAAQA,EAAM2G,QAElB,OAAO3G,GAGE8G,GAAqB,SAAC9G,GAC/B,IADoD,IAAd+G,EAAc,uDAAP,GACtC/G,EAAM2G,SACTI,EAAKvkB,KAAKwd,GACVA,EAAQA,EAAM2G,
QAElB,OAAOI,GAGEC,GAA2B,SAACC,EAAaC,EAAYC,EAAgB5d,GAC9E,IAAI+W,SACA2C,SACImE,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SACrCC,EAA8Bje,EAAOie,4BAMvCC,EAAY,GAEhB,GAAoB,OAAhBR,IAA8C,IAAtB1d,EAAOme,WAC/BD,EAAY,CAAC,CACTnH,SAAU,KAEdA,EAAW,OACR,OACCqH,EAAkBjvB,OAAOkvB,OAAOR,EAAqBS,iBAC/B,IAAtBR,IACAM,EAAkBA,EAAgBzW,QAAO,SAAA5Y,GAAA,OAAKA,EAAEiR,OAAOge,WAAaD,MAGxE,IAAMQ,EAAmBH,EAAgBzW,QAlB5B,SAAC6W,GAEd,OADexe,EAAO4C,UAAa,kBAAM,IAC3B4b,EAAOxe,MAgBqChF,KAAI,SAAAyjB,GAAA,OAAUA,EAAOze,OAAO+W,YAEhF8F,EAAgB,GAEtB,IAA0B,IAAtBiB,EAA6B,CAC7B,IAAMY,EAAwBvvB,OAAOkvB,OAAOR,EAAqBS,gBAEjEI,EAAsB/iB,SAAQ,SAACgjB,GAC3B,IAAMC,EAAaD,EAAU3e,QACI,IAA7B4e,EAAWC,eAA2BD,EAAWH,SAAWze,EAAOye,QAC/DG,EAAWZ,WAAaD,IAC5BlB,EAAc5jB,KAAK0lB,EAAUlI,QAC7BM,EAAW2H,EAAsB/W,QAAO,SAAA5Y,GAAA,OAAKA,IAAM4vB,KAAW3jB,KAAI,SAAAjM,GAAA,OAAKA,EAAEiR,OAAO+W,aACvEviB,QAAU0pB,EAAUjlB,KAAK,CAC9B8d,WACA+H,OAAQH,EAAUlI,MAClB+G,KAAMD,GAAmBoB,EAAUlI,aAOnDM,GAAW,MAAGjD,OAAH,qBAAiByK,GAAjB,CAAmCb,KAAc/V,QAAO,SAAA5Y,GAAA,OAAW,OAANA,KACxEmvB,EAAUjlB,KAAK,CACX8d,WACA8F,wBAAmBA,EAAnB,GAAqC7c,EAAO6c,eAAiB,OAIrE,IAAMkC,EAAYpB,EAAWlH,MAEvBuI,EAAa7vB,OAAO8Q,OAAO,CAC7Bgf,kBAAmBvB,EACnBK,uBACD/d,GAEGkf,EAAmBvB,EAAWwB,aAChClB,GAA+BiB,IAC/BxF,EAAYP,GAAuB+F,EAAkBnI,EAAU,CAC3DuC,gBAAiB2E,IAErBvB,GAAqBwC,EAAkBxF,EAAWsF,IAGtDd,EAAUviB,SAAQ,SAACyjB,GACf,IAAMC,EAAmBlG,GAAuB4F,EAAWK,EAAIrI,UACzDyG,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EA3HO,SAAC5F,EAAW8D,GACjC,IAAK,IAAI/uB,EAAI,EAAG+M,EAAMgiB,EAAKhpB,OAAQ/F,EAAI+M,EAAK/M,IAAK,CAC7C,IAAMgoB,EAAQ+G,EAAK/uB,GACnBirB,EAAYwC,GAA8BxC,EAAWjD,GAEzD,OAAOiD,EAsHuB6F,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIN,OAAO/B,kBAAkBuC,EAAeN,QAE5CtC,GAAqBqC,EAAWM,EAAkBL,EAAY,CAC1DnC,cAAeuC,EAAIvC,cACnBD,mBAAoBqB,GAA+BiB,QAMtDO,GAA4B,SAAC5B,EAAsBF,EAAYC,GACxE,IAAM8B,EAAmB7B,EAAqB6B,iBAE9C,IAAK,IAAMjB,KAAUiB,EAAkB,CACnC,IACMd,EADYc,EAAiBjB,GACNze,OACvB+d,EAAsBH,EAAe5d,OAAOge,SAC5C2B,GAAwB/B,EAAeoB,WAAWW,uBACpD/B,EAAeoB,WAAWW,sBAAsBf,EAAYhB,EAAe5d,QAC/E,GAAI4e,EAAWZ,WAAaD,GAAuB4B,EAAuB,CACtE,IAAMC,EAAgBhB,E
AAW7H,SACjC0G,GAAyBmC,EAAejC,EAAY,CAChDE,uBACAC,mBAAmB,EACnBE,SAAUD,GACXa,MAKFiB,GAAqB,SAAChC,GAA6C,IAAvB7d,EAAuB,uDAAd,GAAIyW,EAAU,aACxEqJ,SACEC,EAAkB/f,EAAO+f,gBACzBhJ,EAAW/W,EAAO+W,SAClB/mB,EAASgQ,EAAOye,OAAhB,IAA0Bze,EAAOge,SAGnC8B,EADAC,EACkBlC,EAAqBS,eAErBT,EAAqB6B,iBAG1B,OAAb3I,SACO+I,EAAgB9vB,GAEvB8vB,EAAgB9vB,GAAO,CACnBymB,QACAzW,WAQCggB,GAAyB,SAACrF,EAAWC,EAAWqF,GACzD,IAAMC,EAAsBvF,EAAUnU,QAAO,SAACC,EAAKrI,GAM/C,MAL+B,WAA3BA,EAAM+hB,YAAYnxB,KAClByX,EAAIxN,KAAJ,MAAAwN,EAAA,GAAYmU,EAAUjT,QAAO,SAAAd,GAAA,OAA0C,IAA7BA,EAAUuZ,OAAOhiB,QACpDA,KAAS6hB,GAChBxZ,EAAIxN,KAAKmF,GAENqI,IACR,IACH,OAAO5K,MAAMC,KAAK,IAAI+S,IAAIqR,IAAsBllB,KAAI,SAAAoD,GAAA,OAASA,EAAMuQ,WAU1D3P,GAAwB,SAACZ,EAAO1O,GACzC,OAAI0O,EAAMkQ,aACClQ,EAAMkQ,cAANlQ,CAAqB1O,GAEzBA,G,0PCjII2wB,G,WA/hBX,c,4FAAwB,SACpB,IAAIC,SAEJztB,KAAKuqB,QAAU,KACfvqB,KAAK+jB,YAAc,GACnB/jB,KAAKskB,oBAAsB,GAC3BtkB,KAAKoqB,UAAY,GANG,2BAARV,EAAQ,qBAARA,EAAQ,gBAQE,IAAlBA,EAAO/nB,SAAkB8rB,EAAS/D,EAAO,cAAe8D,GAExDxtB,KAAK0W,eAAiB+W,EAAO/W,eAC7B1W,KAAK2Q,YAAc8c,EAAO9c,YAC1B3Q,KAAKopB,YAAcqE,EAAOrE,YAC1BppB,KAAKuqB,QAAUkD,EACfztB,KAAKipB,mBAAqBjpB,KAAKuqB,QAAQtB,mBACvCjpB,KAAK0tB,gBAAkBpjB,IACvBtK,KAAKia,wBAAwBkM,0BAE7BsC,GAAUA,cAACzoB,MAAX,OAAoB0pB,IACpB1pB,KAAK0tB,gBAAkB1tB,KAAKipB,mBAAmB9sB,KAC/C6D,KAAKia,wBAAwBkM,wBAC7BnmB,KAAK2tB,sBAAwB,CACzBlC,eAAgB,GAChBoB,iBAAkB,K,+CA0B1B,OAAO7sB,KAAKoQ,gBAAgBvH,OAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEyP,c,gCAY9C,OAAO3L,KAAK0tB,kB,sCAIZ,OAAO1tB,KAAK4tB,c,8CAMZ,OAFA5tB,KAAK4tB,YAAczK,GAAa,CAACnjB,KAAK2Q,YAAa3Q,KAAK0W,gBACnD1W,KAAK8lB,uBAAwB9lB,KAAK0tB,iBAChC1tB,O,6CAIP,OAAOA,KAAKipB,qB,2BAiCV4E,EAAU9d,GACZ,OAAOH,EAAa5P,KAAM6tB,EAAU9d,K,kCAuB3B8d,GACT,OAAOje,EAAa5P,KAAM6tB,EAAU3T,GAAkBla,KAAM6tB,IAAW,K,4BAqBpEC,GACH,OAAOxT,GAAMta,KAAM8tB,K,iCAoBXC,GACR,OAAO1X,GAAWrW,KAAM+tB,K,6BAkDpBnI,EAAUzY,GACd,IAAM6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAMf,OAJAta,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,IAC/BpQ,KAAOoQ,EAAOpQ,MAAQixB,EAAUjxB,KDmChB,SAACqnB,EAAUwB,EAAUgC,EAAcqG,GAC9D,IAAIC,EAAe,GAEbnxB,EAAS6qB,E
AAT7qB,KAEAirB,EAAS5D,EAAS8B,MAAM+H,EAAYxG,WACpC0G,EAAmBzI,GACrBsC,EACApC,EACAgC,EACAxD,EACAU,IAEEL,EAAYD,GAAcznB,GAAM0nB,UAItC,OAFAkD,GAAqBK,EAAQmG,EAAiB1J,EAAU,IAAKL,EAAUwD,EAAchC,GAEjFnB,EAAU9iB,OAAS,GACnBusB,EAAe9J,EAAS8B,MAAM+H,EAAYxG,WAC1CE,GAAqBuG,EAAcC,EAAiB1J,EAAU,IAAKL,EAAUwD,EAAchC,GACpF,CAACoC,EAAQkG,IAGblG,ECvDIoG,CACHpuB,KACA4lB,EACAzY,EAJgB,CAAEsa,UAAWta,EAAOsa,c,gCA4BxC,OAAQznB,KAAK2Q,YAAYhP,SAAW3B,KAAK0W,eAAe/U,S,8BAUnC,IAAlB8lB,IAAkB,yDACf9B,EAAW,IAAI3lB,KAAKstB,YAAYttB,MAMtC,OALIynB,EACA9B,EAAS0I,UAAUruB,MAEnB2lB,EAAS0I,UAAU,MAEhB1I,I,8BA8CFmC,EAAW3a,GAChB,IAAM6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAEfta,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,GACtC,IAAMigB,EAAcptB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKonB,GACtBrwB,EAASoQ,EAATpQ,KACFswB,EAAsBF,GAAuBrF,EAAWC,EAAWqF,GAErE3U,SAEA1b,IAAS8B,EAAcG,IASvByZ,EAAY,CARUoP,GAAiB7nB,KAAMqtB,EAAqB,CAC9DtwB,KAAM8B,EAAcC,OACpB2oB,UAAWta,EAAOsa,WACnBM,GACkBF,GAAiB7nB,KAAMqtB,EAAqB,CAC7DtwB,KAAM8B,EAAcE,QACpB0oB,UAAWta,EAAOsa,WACnBM,IAIHtP,EADsBoP,GAAiB7nB,KAAMqtB,EAAqBlgB,EAAQ4a,GAI9E,OAAOtP,I,wCAIP,OAAOzY,KAAKsuB,e,8CAWZ,OAPAtuB,KAAKsuB,aAAetuB,KAAK4tB,YAAY/kB,OAAO8K,QAAO,SAACC,EAAK2a,EAAU3yB,GAK/D,OAJAgY,EAAI2a,EAASpyB,QAAU,CACnB+F,MAAOtG,EACP4yB,IAAKD,EAAS5iB,UAEXiI,IACR,IACI5T,O,gCAWPA,KAAKuqB,SAAWvqB,KAAKuqB,QAAQkE,YAAYzuB,MACzCA,KAAKuqB,QAAU,KACfvqB,KAAKoqB,UAAUthB,SAAQ,SAACuhB,GACpBA,EAAME,QAAU,QAEpBvqB,KAAKoqB,UAAY,K,kCA6BRC,GACT,IAAIxW,EAAM7T,KAAKoqB,UAAUnV,WAAU,SAAAyZ,GAAA,OAAWA,IAAYrE,MACjD,IAATxW,GAAa7T,KAAKoqB,UAAU5iB,OAAOqM,EAAK,K,gCAQjC8a,GACP3uB,KAAKuqB,SAAWvqB,KAAKuqB,QAAQkE,YAAYzuB,MACzCA,KAAKuqB,QAAUoE,EACfA,GAAUA,EAAOvE,UAAUhkB,KAAKpG,Q,kCA4BhC,OAAOA,KAAKuqB,U,oCA6BZ,OAAOvqB,KAAKoqB,Y,uCA4BZ,OAAOpqB,KAAK+jB,c,+CA4BZ,OAAO/jB,KAAKskB,wB,uwBCwSL1mB,G,YAtxBX,aAAsB,O,4FAAA,oCAANsJ,EAAM,qBAANA,EAAM,sB,iKAAA,2EACTA,KADS,OAGlB,EAAK0nB,eAAiB,GAHJ,E,0WAgFb/nB,GAQLA,EAAUvK,OAAO8Q,OAAO,GAPL,CACfyhB,MAAO,MACPxsB,UAAW,KACXysB,SAAS,EACTC,cAAc,EACd/c,KAAM,IAE8BnL,GACxC,IAAMgC,EAAS7I,KAAK8lB,uBAAuBjd,OAErCmmB,EA
AgBrZ,GAAY5Z,KAC9BiE,KACAA,KAAK8lB,uBAAuBjd,OAC5B7I,KAAK2Q,YACL9J,EAAQkoB,aAAelmB,EAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEC,UAAQ8G,OAASjD,KAAK0W,eAC/D7P,EAAQmL,KACR,CACI8D,WAA8B,WAAlBjP,EAAQgoB,MACpBhZ,SAAUhP,EAAQioB,UAI1B,IAAKjoB,EAAQxE,UACT,OAAO2sB,EAxBG,IA2BN3sB,EAAcwE,EAAdxE,UACA0I,EAAuBikB,EAAvBjkB,KAAMY,EAAiBqjB,EAAjBrjB,OAAQ8J,EAASuZ,EAATvZ,KAChBwZ,EAAatjB,EAAOxD,KAAK,SAAA9E,GAAA,OAAKA,EAAElH,QAEhC+yB,EADgB5yB,OAAO0J,KAAK3D,GACAsR,QAAO,SAACC,EAAK5F,GAC3C,IAAM6F,EAAMob,EAAW9oB,QAAQ6H,GAI/B,OAHa,IAAT6F,GACAD,EAAIxN,KAAK,CAACyN,EAAKxR,EAAU2L,KAEtB4F,IACR,IAgCH,MA9BsB,WAAlB/M,EAAQgoB,MACRK,EAAYpmB,SAAQ,SAACqmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBpkB,EAAKqkB,GAAMtmB,SAAQ,SAACoK,EAAOoc,GACvBvkB,EAAKqkB,GAAME,GAAYD,EAAMtzB,UACzBmE,EACAgT,EACAuC,EAAK6Z,GACL3jB,EAAOyjB,UAKnBrkB,EAAKjC,SAAQ,SAACoK,EAAOoc,GACjBJ,EAAYpmB,SAAQ,SAACqmB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBjc,EAAMkc,GAAQC,EAAMtzB,UAChBmE,EACAgT,EAAMkc,GACN3Z,EAAK6Z,GACL3jB,EAAOyjB,UAMhBJ,I,gCASP,IAAMriB,EAAa3M,KAAK2Q,YAClB4e,EAAM,GAER5iB,EAAWhL,QACMgL,EAAWE,MAAM,KAEzB/D,SAAQ,SAACuK,GAAQ,MACHA,EAAIxG,MAAM,KAAK1E,IAAIO,QADhB,UACjBsE,EADiB,KACVC,EADU,KAGtBA,OAAc/M,IAAR+M,EAAoBA,EAAMD,EAChCuiB,EAAInpB,KAAJ,MAAAmpB,EAAA,GAAYvmB,MAAMiE,EAAMD,EAAQ,GAAGwiB,OAAOrnB,KAAI,SAAC2R,EAAGjG,GAAJ,OAAY7G,EAAQ6G,UAI1E,OAAO0b,I,8BA0BFE,GAAwD,IAA7C/W,EAA6C,uDAAlC,GAAIvL,EAA8B,uDAArB,CAAEsa,WAAW,GAC/CkC,EAAgBA,GAAG8F,EAAUxsB,OAC/BymB,EAAS,CAAC1pB,KAAMyvB,EAAW/W,GACzBiB,EAAenB,gBAAWkR,GAgBhC,OAdAnF,GACIvkB,KACA2Z,EACApL,EAAeG,QACf,CAAE+gB,YAAW9F,gBAAe1Q,eAAgBV,GAAaU,kBACzDP,GAGAvL,EAAOsa,UACP9N,EAAa0U,UAAUruB,MAEvB2Z,EAAa0U,UAAU,MAGpB1U,I,2BAsDL5F,GAA+C,IAA/B5G,EAA+B,uDAAtB,CAAEsa,WAAW,GAClCxE,EAAUjjB,KAAK+mB,QAAQ,CACzB8H,MAAO,MACP7c,KAAM+B,IAEJoK,EAAS8E,EAAQtX,OAAOxD,KAAI,SAAAoD,GAAA,OAASA,EAAMpP,QAC3CuzB,EAAe,CAACvR,GAAQ8C,OAAOgC,EAAQlY,MAEvC4kB,EAAW,IAAI3vB,KAAKstB,YAAYoC,EAAczM,EAAQtX,OAAQ,CAAEmR,WAAY,WAgBlF,OAdAyH,GACIvkB,KACA2vB,EACAphB,EAAeO,KACf3B,EACA4G,GAGA5G,EAAOsa,UACPkI,EAAStB,UAAUruB,MAEnB2vB,EAAStB,UAAU,M
AGhBsB,I,gCAwBA/jB,EAAM/E,GACb+E,EAAOA,GAAQ5L,KAAKopB,YACpBviB,EAAUvK,OAAO8Q,OAAO,GAAI,CAAE6U,eAAgB,KAAOpb,GAErD,IAAMgC,EAAS7I,KAAKoQ,gBAAgBvH,OAC9B+mB,EAAU/mB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAEnD,mBAC5B2e,EAAYD,EAAQ,GAAGjuB,OACzBmuB,SACAC,SACAC,SAEJ,GAAIpkB,IAAS7N,EAAWC,UAEpB,IADA8xB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIzM,EAAOmnB,GAAQ7zB,QAAUyzB,EAAQI,GAAQD,GAEjDD,EAAe1pB,KAAKkP,QAErB,GAAI1J,IAAS7N,EAAWE,QAAS,CAEpC,IADA6xB,EAAiB,CAACjnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,UAAQ8G,KAAK4D,EAAQob,iBACpD8N,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIlP,KAAKwpB,EAAQI,GAAQD,IAE7BD,EAAe1pB,KAAKkP,EAAIrS,KAAK4D,EAAQob,iBAEzC6N,EAAiBA,EAAe7sB,KAAK,UAClC,IAAI2I,IAAS7N,EAAWG,QAU3B,MAAM,IAAIuS,MAAJ,aAAuB7E,EAAvB,qBARN,IADAkkB,EAAiB,CAACjnB,EAAOV,KAAI,SAAAkM,GAAA,OAAKA,EAAElY,WAC/B4zB,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMza,EAAM,GACZ,IAAK0a,EAAS,EAAGA,EAASnnB,EAAOlH,OAAQquB,IACrC1a,EAAIlP,KAAKwpB,EAAQI,GAAQD,IAE7BD,EAAe1pB,KAAKkP,IAM5B,OAAOwa,I,+BAGDvkB,GACN,IAAMyI,EAAYzI,EAAMpP,OACxB6D,KAAK0W,gBAAL,IAA2B1C,EAC3B,IAAMoP,EAAoBpjB,KAAKipB,mBACzBgH,EAAqB7M,EAAkB+F,oBACvCjY,EAAgB3F,EAAM2F,gBACtB+R,EAAU1X,EAAM0F,aAAalG,KAEnC,GAAKqY,EAAkB/X,YAAYE,EAAMpP,QAKlC,CACH,IAAM4M,EAAaqa,EAAkBva,OAAOoM,WAAU,SAAAib,GAAA,OAAaA,EAAU/zB,SAAW6X,KACxFjL,GAAc,IAAMqa,EAAkBva,OAAOE,GAAcwC,QAN3D6X,EAAkBva,OAAOzC,KAAKmF,GAC9B0kB,EAAmBnnB,SAAQ,SAACV,EAAKxM,GAC7BwM,EAAImD,EAAMpP,QAAU,IAAI6P,EAAMkF,EAActV,GAAIqnB,EAAQrnB,GAAI2P,MAapE,OALA6X,EAAkB9X,iBAAmB,KACrC8X,EAAkBrX,iBAAmB,KACrCqX,EAAkB1X,eAAiB,KAEnC1L,KAAKia,wBAAwBkM,wBACtBnmB,O,wCAuCQ2L,EAAQwkB,EAAYhjB,GAAQ,WAC3CxB,EAAS2c,GAAmB3c,GAC5BwB,EAAS7Q,OAAO8Q,OAAO,GAAI,CAAEqa,WAAW,EAAM2I,YAAY,GAASjjB,GAEnE,IAAM6Z,EAAehnB,KAAK4mB,kBACpByJ,EAAUF,EAAW/b,MAAM,EAAG+b,EAAWxuB,OAAS,GAClD2uB,EAAaH,EAAWA,EAAWxuB,OAAS,GAElD,GAAIqlB,EAAarb,EAAOxP,QAAUgR,EAAOijB,WACrC,MAAM,IAAI3f,MAAS9E,EAAOxP,KAApB,sCAGV,IAAMo0B,EAAkBF,EAAQloB,KAAI,
SAACoD,GACjC,IAAMilB,EAAYxJ,EAAazb,GAC/B,IAAKilB,EAED,MAAM,IAAI/f,MAASlF,EAAb,gCAEV,OAAOilB,EAAUtuB,SAGfgkB,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAE1BgJ,EAAKvK,EAAM9V,gBAAgBvH,OAC3B6nB,EAAiBH,EAAgBpoB,KAAI,SAAA0L,GAAA,OAAO4c,EAAG5c,MAEjDkG,EAAc,GACdC,EAAgB,kBAAM,EAAK1I,gBAEzBqf,EAAiB,GACvBjkB,EAAmBwZ,EAAMvV,aAAa,SAAC/U,GACnC,IAAMg1B,EAAaF,EAAevoB,KAAI,SAAAoD,GAAA,OAASA,EAAM0F,aAAalG,KAAKnP,MACvE+0B,EAAe/0B,GAAK00B,kBAAcM,GAAd,QAA0Bh1B,EAAGoe,EAAeD,QAhCzB,MAkC3BgE,GAAa,CAAC4S,GAAiB,CAAChlB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC,WA6C3C,OAVA2a,EAAM2K,SAAStlB,GAEfgZ,GACIvkB,KACAkmB,EACA3X,EAAeK,QACf,CAAEzB,OAAQxB,EAAQ9C,OAAQwnB,GAC1BC,GAGGpK,I,gCAWA2E,GAA2D,IAA9C1d,EAA8C,uDAArC,GAAI2jB,EAAiC,aAAjB3E,EAAiB,uDAAJ,GACxDe,EAAkB/f,EAAO+f,gBACzBhC,EAAsB/d,EAAOge,SAC7B4F,EAAU5jB,EAAO4jB,QACjB7E,EAAYzB,GAAiBzqB,MAC7BgrB,EAAuBkB,EAAUyB,sBACjCtB,EAAmB/B,GAAoBtqB,MACvC8qB,EAAa,CACfwB,aAAcD,EACdzI,MAAOsI,GAgBX,OAbA4E,GAAkB9D,GAAmBhC,EAAsB7d,EAAQnN,MACnE4qB,GAAyBC,EAAaC,EAAY,CAAEE,uBAAsBG,SAAUD,GAChF5uB,OAAO8Q,OAAO,CACV2jB,WACD5jB,IAEH+f,GACAN,GAA0B5B,EAAsBF,EAAY,CACxD3d,SACAgf,eAIDnsB,O,yBAUPgxB,EAAWpkB,GACX,OAAQokB,GACR,I5CplBmB,c4CqlBfhxB,KAAK4uB,eAAexoB,KAAKwG,GAG7B,OAAO5M,O,kCASEgxB,GACT,OAAQA,GACR,I5CnmBmB,c4ComBfhxB,KAAK4uB,eAAiB,GAI1B,OAAO5uB,O,wCAUQ6mB,EAAWkK,GAAS,WACf/wB,KAAK4uB,eACX9lB,SAAQ,SAAAud,GAAA,OAAMA,EAAGtqB,KAAK,EAAM8qB,EAAWkK,Q,0BA8CpDE,EAAkB9jB,GACnB,IAAM6Z,EAAehnB,KAAK4mB,kBAE1B,IAAKI,EAAaiK,GACd,MAAM,IAAIxgB,MAAJ,SAAmBwgB,EAAnB,kBAGV,IAAMC,EAAe/jB,EAAOhR,MAAW80B,EAAlB,UAErB,GAAIjK,EAAakK,GACb,MAAM,IAAIzgB,MAAJ,SAAmBygB,EAAnB,mBAGV,IAb2B,E7CvnB5B,SAAgCC,EAAcxkB,EAAYQ,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAASqjB,EAAmCjkB,EAAnCikB,UAAWtjB,EAAwBX,EAAxBW,QAASd,EAAeG,EAAfH,MAAOC,EAAQE,EAARF,IAD2B,EAEhDkkB,EAAalV,SAFmC,SAE9DoV,EAF8D,KAExDC,EAFwD,KAIhEvjB,IACDf,EAAmB,IAAVA,KAAiBA,GAASA,EAAQqkB,GAASA,EAAOrkB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAMqkB,GAAUA,EAAO,EAAKrkB,EAErDmkB,IACAtjB,EAAU9J,KAAKutB,KAAKvtB,KAAKwtB,IAAIvkB,EAAMD,GAASokB,IAGhDrjB,EAAUF,EAAgBC,EAASd,EAAOC,IAG1Cc,EAAQ,GAAKsjB,GACbt
jB,EAAQpG,QAAQ0pB,GAEhBtjB,EAAQA,EAAQpM,OAAS,IAAM2vB,GAC/BvjB,EAAQ3H,KAAKkrB,EAAO,GAIxB,IADA,IAAMpjB,EAAe,GACZtS,EAAI,EAAGA,EAAImS,EAAQpM,OAAS,EAAG/F,IACpCsS,EAAa9H,KAAK,CACd4G,MAAOe,EAAQnS,GACfqR,IAAKc,EAAQnS,EAAI,KAIzB,IAAM61B,EAAa,GAYnB,OAXA/kB,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMsX,EAAQie,EAAalgB,aAAalG,KAAKnP,GAC7C,GAAIsX,aAAiBhG,EACjBukB,EAAWrrB,KAAK8M,OADpB,CAKA,IAAM1R,EAAQyM,EAAgBC,EAAcgF,GAC5Cue,EAAWrrB,KAAQ5E,EAAMwL,MAAzB,IAAkCxL,EAAMyL,SAGrC,CAAEwkB,aAAYpU,KAAMtP,G6C2lBM2jB,CADR1xB,KAAKoQ,gBAAgB/E,YAAY4lB,GACWjxB,KAAK2Q,YAAaxD,GAA3EskB,EAdmB,EAcnBA,WAAYpU,EAdO,EAcPA,KAEdsU,EAAW5T,GAAa,CAAC0T,GAAa,CACxC,CACIt1B,KAAM+0B,EACNtlB,KAAMlN,EAAUE,UAChBkc,QAAS1c,EAAiBG,OAC1B8e,SACA,CAAC6T,IAAe,GAElBhL,EAAQlmB,KAAKkmB,MAAM/Y,EAAOsa,WAWhC,OAVAvB,EAAM2K,SAASc,GAEfpN,GACIvkB,KACAkmB,EACA3X,EAAeM,IACd,CAAEoiB,mBAAkB9jB,SAAQ+jB,gBAC5B,MAGEhL,I,qCA8BP,OAAO,IAAItoB,EAHEoC,KAAK4xB,UAAU7zB,EAAWC,WACxBgC,KAAK6xB,e,iCA+CZpY,EAAcL,EAAWjM,GACjC,IAAM6Z,EAAehnB,KAAK4mB,kBAE1BnN,EAAa3Q,SAAQ,SAACkL,GAClB,IAAKgT,EAAahT,GACd,MAAM,IAAIvD,MAAJ,SAAmBuD,EAAnB,mCAId,IAAMga,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAKf,OF3iBuB,SAACrD,EAAU3K,GAAiD,IAAnCL,EAAmC,uDAAvB,SAAAjY,GAAA,OAAOA,GAAKgM,EAAW,aAEnFsa,EACAta,EADAsa,UAEElO,EAAgB6K,EAAShU,gBAAgB/E,YAJwC,EASnFqa,GACAtB,EAAS8B,MAAMuB,GACfrO,EACAjM,EACAiX,GACA,sCAAIsF,EAAJ,qBAAIA,EAAJ,uBAAepE,GAAuBA,aAAIoE,EAA3B,QAAmCjQ,EAAcF,QAPhEgM,EAPmF,EAOnFA,gBACAC,EARmF,EAQnFA,aASEsM,EAAY,GAoBlB,OAnBAx1B,OAAO0J,KAAKuf,GAAiBvT,OAAOlJ,SAAQ,SAACzF,GACzC,GAAIkiB,EAAgBliB,GAAI,CACpB,IAAM2kB,EAAS5D,EAAS8B,MAAMuB,GACxBgC,EAAajE,EAAaniB,GAChC2kB,EAAOrX,YAAc4U,EAAgBliB,GAAGJ,KAAK,KAC7C+kB,EAAO/N,wBAAwBkM,wBAI3BsB,GACAlD,GAAmBH,EAAU4D,EAAQzZ,EAAeC,OAAQrB,GAHtC,SAAAtE,GAAA,OAAU4Q,EAAa8N,OAAM,SAAAzN,GAAA,OAAKjR,EAAOiR,GAAGO,gBAAkBoP,EAAWzjB,KAAK8T,SAKxGkO,EAAOjE,YAAYiE,EAAOjE,YAAYpiB,OAAS,GAAGsiB,KAAOuB,EAAaniB,GAEtEyuB,EAAU1rB,KAAK4hB,OAKhB8J,EEsgBIC,CAAgB/xB,KAAMyZ,EAAcL,EAF3CjM,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,M,sCAyCmB,IAA9C6kB,EAA8C,uDAA/B,GAAIC,EAA2B,uDAAZ,GAAI9kB,
EAAQ,aACnD6gB,EAAY,CACdjxB,KAAM8B,EAAcC,OACpB2oB,WAAW,GAET2F,EAAcptB,KAAK4mB,kBACnBmB,EAAYzrB,OAAO0J,KAAKonB,GACxB8E,EAA0B,CAAC,CAACD,IAalC,OAXA9kB,EAAS7Q,OAAO8Q,OAAO,GAAI4gB,EAAW7gB,IACtC6kB,EAAeA,EAAarwB,OAASqwB,EAAe,CAAC,KAGxClpB,SAAQ,SAACqpB,EAAUv2B,GAC5Bs2B,EAAwBt2B,GAAKuxB,GAAuBA,GAADA,UAC3CgF,GADqB,GACRF,IACjBlK,EACAqF,MAGDjF,GAAiBnoB,KAAMkyB,EAAyB/kB,EAAQ4a,M,kDApuBhC5a,GAC/B,OAAOD,EAAkBI,iBAAiBH,K,+BA7B1C,OAAOoL,K,iCAOP,OAAOwK,K,iCAOP,OAAOjF,O,GAnES0P,ICxCTvW,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKK,GAA0CF,GAA1CE,IAAKC,GAAqCH,GAArCG,IAAK0a,GAAgC7a,GAAhC6a,MAAOC,GAAyB9a,GAAzB8a,KAAMC,GAAmB/a,GAAnB+a,MAAYC,GAAOhb,GAAZib,ICyBjDC,GAAY,CACdC,QC2LmB,sCAAIC,EAAJ,qBAAIA,EAAJ,uBACnB,SAAC/b,GAAqC,IAAjCzJ,EAAiC,uDAAxB,CAAEsa,WAAW,GACnBmL,EAAYhc,EACZic,SACEvJ,EAAc,GA8BpB,OA5BAqJ,EAAW7pB,SAAQ,SAAC+a,GAChB+O,EAAY/O,EAAU+O,GACtBtJ,EAAYljB,KAAZ,MAAAkjB,EAAA,EAAoBsJ,EAAU7O,cACzB8O,IACDA,EAAaD,MAIjBC,GAAcA,IAAeD,GAC7BC,EAAWC,UAIfF,EAAUtO,oBAAsB,GAChCC,GACI3N,EACAgc,EACArkB,EAAeI,QACf,KACA2a,GAGAnc,EAAOsa,UACPmL,EAAUvE,UAAUzX,GAEpBgc,EAAUvE,UAAU,MAGjBuE,ID5NXG,ICyHe,sCAAI7rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGmc,IAAH,MAAAnc,EAAU1P,KDxH5CsgB,OC6BkB,sCAAItgB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4Q,OAAH,MAAA5Q,EAAa1P,KD5BlD8rB,QC4DmB,sCAAI9rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGoc,QAAH,MAAApc,EAAc1P,KD3DpDsR,QCmJmB,sCAAItR,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG4B,QAAH,MAAA5B,EAAc1P,KDlJpD+rB,kBE1B6B,sCAAI/rB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAGqc,kBAAH,MAAArc,EAAwB1P,KF2BxE8K,KElBgB,sCAAI9K,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA0P,GAAA,OAAMA,EAAG5E,KAAH,MAAA4E,EAAW1P,KFmB9C0I,eACAyG,cACA6c,YGlCG,SAAsB1Y,EAAYC,GACrC,OAAO7K,EAAa4K,EAAYC,EAAYP,GAAkBM,EAAYC,IAAa,IHkCvFF,iBACAG,kBACAyY,clC3BG,SAAwB3Y,EAAYC,EAAY1K,GACnD,OAAOuK,GAAMC,GAAcC,EAAYC,EAAY1K,GAAW2K,GAAeF,EAAYC,EAAY1K,KkC2BrGuK,SACA5N,sBAGE0mB,G,KAAcA,QACpB92B,OAAO8Q,OAAOxP,GAAW,CACrB60B,aACAY,QACA9kB,iBACAzO,oBACA/B,aACAc,gBACAqO,oBACAkmB,WACA9U,iBACAgV,iBACDC,GAEY31B","file":"datamodel.js","so
urcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: 
merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a 
number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof 
Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of 
the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n 
const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const 
tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype 
=== JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index 
of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data 
- The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new 
Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo 
Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} 
union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = 
filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. 
DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * 
@param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n 
default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return 
(dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n 
tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from 
'../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the 
subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import Field 
from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return 
numberFormat instanceof Function ? numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of 
the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n // constructor (schema) {\n // super();\n // this.schema = schema;\n // this._dtf = new DateTimeFormatter(format);\n // }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 
1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date 
value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n 
.registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? 
new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) 
columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options 
= Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from 
'../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends 
DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from './model/dataConverter';\nimport DSVStringConverter from './defaultConverters/dsvStringConverter';\nimport JSONConverter from './defaultConverters/jsonConverter';\nimport DSVArrayConverter from './defaultConverters/dsvArrayConverter';\nimport AutoDataConverter from './defaultConverters/autoCoverter';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n *\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n 
converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n *\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let 
collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && 
generateRowDiffset(newRowDiffSet, i, lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new 
fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? (fields) => {\n const present = dLen ? 
valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n 
DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case 
FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
`0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n 
return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = 
Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n 
nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied 
value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n 
this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. 
This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is 
filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser 
representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n 
DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const 
binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. 
Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. 
result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} 
args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file +{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./node_modules/d3-dsv/src/autoType.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/index.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/converter/defaultConverters/autoConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/defaultConverters/index.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/index.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://Data
Model/./src/enums/group-by-functions.js","webpack://DataModel/./src/enums/index.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/index.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/index.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/natural-join.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/operator/pure-operator
s.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/domain-generator.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/utils/index.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/value.js"],"names":["autoType","object","key","value","trim","number","NaN","isNaN","test","Date","csv","dsv","csvParse","parse","csvParseRows","parseRows","csvFormat","format","csvFormatBody","formatBody","csvFormatRows","formatRows","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","Function","map","name","i","JSON","stringify","join","customConverter","f","row","inferColumns","rows","columnSet","Object","create","forEach","column","push","pad","width","s","length","Array","formatYear","year","formatDate","date","hours","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","RegExp","DELIMITER","charCodeAt","text","convert","N","I","n","t","eof","eol","token","j","c","slice","replace","preformatBody","formatValue","concat","formatRow","tsv","tsvParse","tsvParseRows","tsvFormat","tsvFormatBody","tsvFormatRows","PROPAGATION","ROW_ID","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","AND","OR","DataConverterStore","store","Map","converters","_getDefaultConverters","DSVStringConverter","DSVArrayConverter","JSONConverter","AutoDataConverter","set","converter","type","DataConverter","delete","has","get","converterStore","getStore"
,"DataFormat","AUTO","data","schema","options","DSV_ARR","DSVArr","DSV_STR","DSVStr","FLAT_JSON","FlatJSON","_type","Error","Auto","dataFormat","detectDataFormat","arr","isArray","defaultOption","firstRowHeader","schemaFields","unitSchema","assign","columnMajor","headers","splice","headerMap","reduce","acc","h","fields","field","schemaField","headIndex","str","fieldSeparator","d3Dsv","header","insertionIndex","schemaFieldsName","item","keys","DataModel","args","_onPropagation","defOptions","order","formatter","withUid","getAllFields","sort","getPartialFieldspace","dataGenerated","dataBuilder","call","_rowDiffset","d","_colIdentifier","columnWise","addUid","uids","fieldNames","e","fmtFieldNames","fmtFieldIdx","next","idx","indexOf","elem","fIdx","fmtFn","datum","datumIdx","undefined","rowDiffset","ids","diffSets","split","Number","start","end","fill","_","fieldsArr","reducers","config","saveChild","groupByString","params","newDataModel","groupBy","persistDerivations","defaultReducer","reducerStore","setParent","sortingDetails","rawData","getData","dataInCSVArr","sortedDm","constructor","_dataFormat","getFieldspace","colData","formattedData","rowsCount","serializedData","rowIdx","colIdx","fieldName","partialFieldspace","_partialFieldspace","cachedValueObjects","_cachedValueObjects","partialField","fieldsObj","obj","Value","fieldIndex","findIndex","fieldinst","_cachedFieldsObj","_cachedDimension","_cachedMeasure","__calculateFieldspace","calculateFieldsConfig","dependency","sanitizeUnitSchema","replaceVar","fieldsConfig","getFieldsConfig","depVars","retrieveFn","depFieldIndices","fieldSpec","index","clone","fs","suppliedFields","cachedStore","cloneProvider","detachedRoot","computedValues","rowDiffsetIterator","fieldsData","createFields","addField","identifiers","addToNameSpace","propConfig","isMutableAction","propagationSourceId","sourceId","payload","rootModel","getRootDataModel","propagationNameSpace","_propagationNameSpace","rootGroupByModel","getRootGroupByModel","
rootModels","groupByModel","model","addToPropNamespace","propagateToAllDataModels","propagateImmutableActions","eventName","callback","propModel","propListeners","fn","measureFieldName","binFieldName","measureField","createBinnedFieldData","binnedData","bins","binField","FieldType","DIMENSION","subtype","DimensionSubtype","BINNED","serialize","getSchema","dimensionArr","reducerFn","defConfig","mode","FilteringMode","NORMAL","splitWithSelect","uniqueFields","commonFields","fieldConfig","allFields","normalizedProjFieldSets","fieldSet","getNormalizedProFields","splitWithProject","InvalidAwareTypes","invalidAwareVals","fieldRegistry","Relation","CATEGORICAL","TEMPORAL","MEASURE","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","MeasureSubtype","CONTINUOUS","Operators","compose","bin","select","project","calculateVariable","crossProduct","difference","naturalJoin","leftOuterJoin","rightOuterJoin","fullOuterJoin","union","version","pkg","Stats","DateTimeFormatter","FieldsUtility","enums","createUnitField","BUILDER","build","createUnitFieldFromPartial","dataColumn","headersObj","fieldStore","createNamespace","fieldArr","dataId","getUniqueId","getMeasure","measureFields","getDimension","dimensionFields","Binned","binsArr","BinnedParser","Dimension","Categorical","hash","Set","domain","add","CategoricalParser","Continuous","min","POSITIVE_INFINITY","max","NEGATIVE_INFINITY","ContinuousParser","Measure","_cachedDomain","calculateDataDomain","Field","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","Temporal","description","displayName","builder","_params","_context","PartialField","parser","unit","defAggFn","defaultReducerName","numberFormat","formatNumber","val","regex","String","result","isInvalid","matched","match","parseFloat","NA","getInvalidType","FieldParser","parsedVal","TemporalParser","_dtf","nativeDate","getNativeDate","getTime","_sanitize","_cachedMinDiff","sortedData","filter","a","b"
,"arrLn","minDiff","prevDatum","nextDatum","processedCount","Math","isFinite","parsedDatum","formatAs","prepareSelectionData","resp","entries","prepareJoinData","formattedValue","rawValue","updateFields","fieldStoreName","colIdentifier","collID","partialFieldMap","newFields","coll","persistCurrentDerivation","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastInsertedValue","li","selectRowDiffsetIterator","checker","lastInsertedValueSel","lastInsertedValueRej","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","fieldStoreObj","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","selectorHelperFn","cloneWithAllFields","getKey","len","keyFn","rowId","internalValue","boundsChecker","domainArr","some","dom","isWithinDomain","fieldType","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","range","values","dLen","valuesMap","rangeKeys","present","every","def","filteredModel","clonedDMs","cloned","derivation","derivationFormula","addDiffsetToClonedDm","selectConfig","cloneWithSelect","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithProject","projField","projectionSet","actualProjField","projFieldSet","projFields","extend2","validateUnitSchema","sanitizeAndValidateSchema","resolveFieldName","dataHeader","fieldNameAs","as","updateData","relation","defaultConfig","nameSpace","valueObjects","rawFieldsData","formattedFieldsData","fieldInSchema","getDerivationArguments","applyExistingOperationOnModel","dataModel","derivations","getDerivations","selectionModel","getFilteredModel","path","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","_parent","find","getPathToRootModel","propagationIn
f","propagateToSource","propagateInterpolatedValues","filterFn","entry","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","sourceIdentifiers","inf","propagationModel","reverse","immutableActions","filterImmutableAction","criteriaModel","sourceNamespace","normalizedProjField","search","from","getNumberFormattedVal","require","module","exports","default","_invalidAwareValsMap","_value","NULL","NIL","invalid","nil","null","generateBuckets","binSize","buckets","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","binsCount","dMin","dMax","ceil","abs","unshift","dm","operations","currentDM","firstChild","dispose","defaultFilterFn","dm1","dm2","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","getCommonSchema","tmpSchema","rowAdded","rowPosition","ii","tuple","userArg","cloneProvider1","cloneProvider2","dm1Fields","dm2Fields","tupleObj","cellVal","iii","retObj","reqSorting","tmpDataArr","colIArr","colName","insertInd","sortData","tmpData","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","isArrEqual","prepareDataHelper","addData","hashData","schemaName","fs1","fs2","retArr","fs1Arr","getFilteredValues","sum","filteredNumber","totalSum","curr","avg","filteredValues","first","last","count","variance","mean","num","std","sqrt","fnList","getFieldArr","dimensions","getReducerObj","measures","defReducer","measureName","resolve","existingDataModel","sFieldArr","reducerObj","dbName","measureArr","hashMap","rowCount","defSortFn","a1","b1","merge","lo","mid","hi","sortFn","mainArr","auxArr","mergeSort","naturalJoinFilter","commonSchemaArr","retainTuple","dataModel1","dataModel2","rowDiffArr","diffStr","diffStsArr","getSortFn","dataType","sortType","retFunc","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","toLowerCase","grou
pData","groupedData","fieldVal","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","arg","label","applyStandardSort","sortMeta","isCallable","sortingFn","m","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","applyGroupSort","detail","sortedGroupMap","nextMap","shift","dataObj","sDetial","groupSortingIdx","standardSortingDetails","groupSortingDetails","pop","source","_fieldStoreName","_fieldspace","joinWith","unionWith","differenceWith","projectionClone","rejectionClone","_fieldConfig","fieldObj","removeChild","sibling","parent","sd","convertToNativeDate","escape","dtParams","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","parseInt","defaultRangeParser","l","nVal","getTokenDefinitions","daysDef","short","long","monthsDef","definitions","H","extract","getHours","toString","p","P","M","mins","getMinutes","S","getSeconds","K","ms","getMilliseconds","day","getDay","A","getDate","month","getMonth","B","y","substring","presentDate","presentYear","trunc","getFullYear","Y","getTokenFormalNames","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","occurrence","forwardChar","nDate","formattedStr","formattedVal","prototype","dateTimeStamp","extractTokenValue","dtParamSeq","noBreak","dtParamArr","resolverKey","resolverParams","resolverFn","param","resolvedVal","hasOwnProperty","apply","checkIfOnlyYear","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","occurrenceLength","extractValues","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","obj1","obj2","skipUndef","tgtArr","srcArr","srcVal"
,"tgtVal","cRef","isObject","isString","uniqueValues","round","random","arr1","arr2","ReducerStore","reducer","__unregister","defineProperties","enumerable","configurable","writable","_formattedValue","_internalValue"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,kDAA0C,gCAAgC;AAC1E;AACA;;AAEA;AACA;AACA;AACA,gEAAwD,kBAAkB;AAC1E;AACA,yDAAiD,cAAc;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iDAAyC,iCAAiC;AAC1E,wHAAgH,mBAAmB,EAAE;AACrI;AACA;;AAEA;AACA;AACA;AACA,mCAA2B,0BAA0B,EAAE;AACvD,yCAAiC,eAAe;AAChD;AACA;AACA;;AAEA;AACA,8DAAsD,+DAA+D;;AAErH;AACA;;;AAGA;AACA;;;;;;;;;;;;;AClFA;AAAA;AAAe,SAASA,QAAT,CAAkBC,MAAlB,EAA0B;AACvC,OAAK,IAAIC,GAAT,IAAgBD,MAAhB,EAAwB;AACtB,QAAIE,QAAQF,OAAOC,GAAP,EAAYE,IAAZ,EAAZ;AAAA,QAAgCC,MAAhC;AACA,QAAI,CAACF,KAAL,EAAYA,QAAQ,IAAR,CAAZ,KACK,IAAIA,UAAU,MAAd,EAAsBA,QAAQ,IAAR,CAAtB,KACA,IAAIA,UAAU,OAAd,EAAuBA,QAAQ,KAAR,CAAvB,KACA,IAAIA,UAAU,KAAd,EAAqBA,QAAQG,GAAR,CAArB,KACA,IAAI,CAACC,MAAMF,SAAS,CAACF,KAAhB,CAAL,EAA6BA,QAAQE,MAAR,CAA7B,KACA,IAAI,8FAA8FG,IAA9F,CAAmGL,KAAnG,CAAJ,EAA+GA,QAAQ,IAAIM,IAAJ,CAASN,KAAT,CAAR,CAA/G,KACA;AACLF,WAAOC,GAAP,IAAcC,KAAd;AACD;AACD,SAAOF,MAAP;AACD,C;;;;;;;;;;;;ACbD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIS,MAAMC,oDAAGA,CAAC,GAAJ,CAAV;;AAEO,IAAIC,WAAWF,IAAIG,KAAnB;AACA,IAAIC,eAAeJ,IAAIK,SAAvB;AACA,IAAIC,YAAYN,IAAIO,MAApB;AACA,IAAIC,gBAAgBR,IAAIS,UAAxB;AACA,IAAIC,gBAAgBV,IAAIW,UAAxB,C;;;;;;;;;;;;ACRP;AAAA,IAAIC,MAAM,EAAV;AAAA,IACIC,MAAM,EADV;AAAA,IAEIC,QAAQ,EAFZ;AAAA,IAGIC,UAAU,EAHd;AAAA,IAIIC,SAAS,EAJb;;AAMA,SAASC,eAAT,CAAyBC,OAAzB,EAAkC;AAChC,SAAO,IAAIC,QAAJ,CAAa,GAAb,EAAkB,aAAaD,QAAQE,GAAR,CAAY,UAASC,IAAT,EAAeC,CAAf,EAAkB;AAClE,WAAOC,KAAKC,SAAL,CAAeH,IAAf,IAAuB,MAAvB,GAAgCC,CAAhC,GAAoC,GAA3C;AACD,GAFqC,EAEnCG,IAFmC,CAE9B,GAF8B,CAAb,GAEV,GAFR,CAAP;AAGD;;AAED,SAASC,eAAT,CAAyBR,OAAzB,EAAkCS,CAAlC,EAAqC;AACnC,MA
AIpC,SAAS0B,gBAAgBC,OAAhB,CAAb;AACA,SAAO,UAASU,GAAT,EAAcN,CAAd,EAAiB;AACtB,WAAOK,EAAEpC,OAAOqC,GAAP,CAAF,EAAeN,CAAf,EAAkBJ,OAAlB,CAAP;AACD,GAFD;AAGD;;AAED;AACA,SAASW,YAAT,CAAsBC,IAAtB,EAA4B;AAC1B,MAAIC,YAAYC,OAAOC,MAAP,CAAc,IAAd,CAAhB;AAAA,MACIf,UAAU,EADd;;AAGAY,OAAKI,OAAL,CAAa,UAASN,GAAT,EAAc;AACzB,SAAK,IAAIO,MAAT,IAAmBP,GAAnB,EAAwB;AACtB,UAAI,EAAEO,UAAUJ,SAAZ,CAAJ,EAA4B;AAC1Bb,gBAAQkB,IAAR,CAAaL,UAAUI,MAAV,IAAoBA,MAAjC;AACD;AACF;AACF,GAND;;AAQA,SAAOjB,OAAP;AACD;;AAED,SAASmB,GAAT,CAAa5C,KAAb,EAAoB6C,KAApB,EAA2B;AACzB,MAAIC,IAAI9C,QAAQ,EAAhB;AAAA,MAAoB+C,SAASD,EAAEC,MAA/B;AACA,SAAOA,SAASF,KAAT,GAAiB,IAAIG,KAAJ,CAAUH,QAAQE,MAAR,GAAiB,CAA3B,EAA8Bf,IAA9B,CAAmC,CAAnC,IAAwCc,CAAzD,GAA6DA,CAApE;AACD;;AAED,SAASG,UAAT,CAAoBC,IAApB,EAA0B;AACxB,SAAOA,OAAO,CAAP,GAAW,MAAMN,IAAI,CAACM,IAAL,EAAW,CAAX,CAAjB,GACHA,OAAO,IAAP,GAAc,MAAMN,IAAIM,IAAJ,EAAU,CAAV,CAApB,GACAN,IAAIM,IAAJ,EAAU,CAAV,CAFJ;AAGD;;AAED,SAASC,UAAT,CAAoBC,IAApB,EAA0B;AACxB,MAAIC,QAAQD,KAAKE,WAAL,EAAZ;AAAA,MACIC,UAAUH,KAAKI,aAAL,EADd;AAAA,MAEIC,UAAUL,KAAKM,aAAL,EAFd;AAAA,MAGIC,eAAeP,KAAKQ,kBAAL,EAHnB;AAIA,SAAOxD,MAAMgD,IAAN,IAAc,cAAd,GACDH,WAAWG,KAAKS,cAAL,EAAX,EAAkC,CAAlC,IAAuC,GAAvC,GAA6CjB,IAAIQ,KAAKU,WAAL,KAAqB,CAAzB,EAA4B,CAA5B,CAA7C,GAA8E,GAA9E,GAAoFlB,IAAIQ,KAAKW,UAAL,EAAJ,EAAuB,CAAvB,CAApF,IACCJ,eAAe,MAAMf,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAtE,GAA4Eb,IAAIe,YAAJ,EAAkB,CAAlB,CAA5E,GAAmG,GAAlH,GACDF,UAAU,MAAMb,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAhF,GACAF,WAAWF,KAAX,GAAmB,MAAMT,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAAjE,GACA,EAJA,CADN;AAMD;;AAEc,yEAASS,SAAT,EAAoB;AACjC,MAAIC,WAAW,IAAIC,MAAJ,CAAW,QAAQF,SAAR,GAAoB,OAA/B,CAAf;AAAA,MACIG,YAAYH,UAAUI,UAAV,CAAqB,CAArB,CADhB;;AAGA,WAAS1D,KAAT,CAAe2D,IAAf,EAAqBnC,CAArB,EAAwB;AACtB,QAAIoC,OAAJ;AAAA,QAAa7C,OAAb;AAAA,QAAsBY,OAAOzB,UAAUyD,IAAV,EAAgB,UAASl
C,GAAT,EAAcN,CAAd,EAAiB;AAC5D,UAAIyC,OAAJ,EAAa,OAAOA,QAAQnC,GAAR,EAAaN,IAAI,CAAjB,CAAP;AACbJ,gBAAUU,GAAV,EAAemC,UAAUpC,IAAID,gBAAgBE,GAAhB,EAAqBD,CAArB,CAAJ,GAA8BV,gBAAgBW,GAAhB,CAAvD;AACD,KAH4B,CAA7B;AAIAE,SAAKZ,OAAL,GAAeA,WAAW,EAA1B;AACA,WAAOY,IAAP;AACD;;AAED,WAASzB,SAAT,CAAmByD,IAAnB,EAAyBnC,CAAzB,EAA4B;AAC1B,QAAIG,OAAO,EAAX;AAAA,QAAe;AACXkC,QAAIF,KAAKtB,MADb;AAAA,QAEIyB,IAAI,CAFR;AAAA,QAEW;AACPC,QAAI,CAHR;AAAA,QAGW;AACPC,KAJJ;AAAA,QAIO;AACHC,UAAMJ,KAAK,CALf;AAAA,QAKkB;AACdK,UAAM,KANV,CAD0B,CAOT;;AAEjB;AACA,QAAIP,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BjD,OAA/B,EAAwC,EAAEiD,CAAF;AACxC,QAAIF,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BhD,MAA/B,EAAuC,EAAEgD,CAAF;;AAEvC,aAASM,KAAT,GAAiB;AACf,UAAIF,GAAJ,EAAS,OAAOvD,GAAP;AACT,UAAIwD,GAAJ,EAAS,OAAOA,MAAM,KAAN,EAAazD,GAApB;;AAET;AACA,UAAIU,CAAJ;AAAA,UAAOiD,IAAIN,CAAX;AAAA,UAAcO,CAAd;AACA,UAAIV,KAAKD,UAAL,CAAgBU,CAAhB,MAAuBzD,KAA3B,EAAkC;AAChC,eAAOmD,MAAMD,CAAN,IAAWF,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBnD,KAAlC,IAA2CgD,KAAKD,UAAL,CAAgB,EAAEI,CAAlB,MAAyBnD,KAA3E;AACA,YAAI,CAACQ,IAAI2C,CAAL,KAAWD,CAAf,EAAkBI,MAAM,IAAN,CAAlB,KACK,IAAI,CAACI,IAAIV,KAAKD,UAAL,CAAgBI,GAAhB,CAAL,MAA+BlD,OAAnC,EAA4CsD,MAAM,IAAN,CAA5C,KACA,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM;AAC/E,eAAOH,KAAKW,KAAL,CAAWF,IAAI,CAAf,EAAkBjD,IAAI,CAAtB,EAAyBoD,OAAzB,CAAiC,KAAjC,EAAwC,IAAxC,CAAP;AACD;;AAED;AACA,aAAOT,IAAID,CAAX,EAAc;AACZ,YAAI,CAACQ,IAAIV,KAAKD,UAAL,CAAgBvC,IAAI2C,GAApB,CAAL,MAAmClD,OAAvC,EAAgDsD,MAAM,IAAN,CAAhD,KACK,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM,SAA1E,MACA,IAAIO,MAAMZ,SAAV,EAAqB;AAC1B,eAAOE,KAAKW,KAAL,CAAWF,CAAX,EAAcjD,CAAd,CAAP;AACD;;AAED;AACA,aAAO8C,MAAM,IAAN,EAAYN,KAAKW,KAAL,CAAWF,CAAX,EAAcP,CAAd,CAAnB;AACD;;AAED,WAAO,CAACG,IAAIG,OAAL,MAAkBzD,GAAzB,EAA8B;AAC5B,UAAIe,MAAM,EAAV;AACA,aAAOuC,MAAMvD,GAAN,IAAauD,MAAMtD,GAA1B;AAA+Be,YAAIQ,IAAJ,CAAS+B,CAAT,GAAaA,IAAIG,OAAjB;AAA/B,OACA,IAAI3C,KAAK,CAACC,MAAMD,EAAEC,GAAF,
EAAOsC,GAAP,CAAP,KAAuB,IAAhC,EAAsC;AACtCpC,WAAKM,IAAL,CAAUR,GAAV;AACD;;AAED,WAAOE,IAAP;AACD;;AAED,WAAS6C,aAAT,CAAuB7C,IAAvB,EAA6BZ,OAA7B,EAAsC;AACpC,WAAOY,KAAKV,GAAL,CAAS,UAASQ,GAAT,EAAc;AAC5B,aAAOV,QAAQE,GAAR,CAAY,UAASe,MAAT,EAAiB;AAClC,eAAOyC,YAAYhD,IAAIO,MAAJ,CAAZ,CAAP;AACD,OAFM,EAEJV,IAFI,CAECgC,SAFD,CAAP;AAGD,KAJM,CAAP;AAKD;;AAED,WAASlD,MAAT,CAAgBuB,IAAhB,EAAsBZ,OAAtB,EAA+B;AAC7B,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO,CAACZ,QAAQE,GAAR,CAAYwD,WAAZ,EAAyBnD,IAAzB,CAA8BgC,SAA9B,CAAD,EAA2CoB,MAA3C,CAAkDF,cAAc7C,IAAd,EAAoBZ,OAApB,CAAlD,EAAgFO,IAAhF,CAAqF,IAArF,CAAP;AACD;;AAED,WAAShB,UAAT,CAAoBqB,IAApB,EAA0BZ,OAA1B,EAAmC;AACjC,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO6C,cAAc7C,IAAd,EAAoBZ,OAApB,EAA6BO,IAA7B,CAAkC,IAAlC,CAAP;AACD;;AAED,WAASd,UAAT,CAAoBmB,IAApB,EAA0B;AACxB,WAAOA,KAAKV,GAAL,CAAS0D,SAAT,EAAoBrD,IAApB,CAAyB,IAAzB,CAAP;AACD;;AAED,WAASqD,SAAT,CAAmBlD,GAAnB,EAAwB;AACtB,WAAOA,IAAIR,GAAJ,CAAQwD,WAAR,EAAqBnD,IAArB,CAA0BgC,SAA1B,CAAP;AACD;;AAED,WAASmB,WAAT,CAAqBnF,KAArB,EAA4B;AAC1B,WAAOA,SAAS,IAAT,GAAgB,EAAhB,GACDA,iBAAiBM,IAAjB,GAAwB6C,WAAWnD,KAAX,CAAxB,GACAiE,SAAS5D,IAAT,CAAcL,SAAS,EAAvB,IAA6B,OAAOA,MAAMiF,OAAN,CAAc,IAAd,EAAoB,MAApB,CAAP,GAAqC,IAAlE,GACAjF,KAHN;AAID;;AAED,SAAO;AACLU,WAAOA,KADF;AAELE,eAAWA,SAFN;AAGLE,YAAQA,MAHH;AAILE,gBAAYA,UAJP;AAKLE,gBAAYA;AALP,GAAP;AAOD,C;;;;;;;;;;;;ACjKD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;ACFA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIoE,MAAM9E,oDAAGA,CAAC,IAAJ,CAAV;;AAEO,IAAI+E,WAAWD,IAAI5E,KAAnB;AACA,IAAI8E,eAAeF,IAAI1E,SAAvB;AACA,IAAI6E,YAAYH,IAAIxE,MAApB;AACA,IAAI4E,gBAAgBJ,IAAItE,UAAxB;AACA,IAAI2E,gBAAgBL,IAAIpE,UAAxB,C;;;;;;;;;;;;;;;;;;;;;;;ACRP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;AAGO,IAAM0E,cAAc,aAApB;;AAEP;;;AAGO,IAAMC,SAAS,QAAf;;AAEP;;;AAGO,IAAMC,iBAAiB;AAC1BC,YAAQ,QADkB;AAE1BC,aAAS,SAFiB;AAG1BC,aAAS,OAHiB;AAI1BC,aAAS,SAJiB;AA
K1BC,aAAS,oBALiB;AAM1BC,SAAK,KANqB;AAO1BC,UAAM;AAPoB,CAAvB;;AAUA,IAAMC,QAAQ;AACjBC,WAAO,OADU;AAEjBC,eAAW,WAFM;AAGjBC,gBAAY,YAHK;AAIjBC,aAAS,SAJQ;AAKjBC,eAAW;AALM,CAAd;;AAQA,IAAMC,oBAAoB;AAC7BC,SAAK,KADwB;AAE7BC,QAAI;AAFyB,CAA1B,C;;;;;;;;;;;;;;;;;;;AChCP;AACA;;IAEMC,kB;AACF,kCAAc;AAAA;;AACV,aAAKC,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKC,UAAL,CAAgB,KAAKC,qBAAL,EAAhB;AACH;;;;gDAEuB;AACpB,mBAAO,CACH,IAAIC,qEAAJ,EADG,EAEH,IAAIC,oEAAJ,EAFG,EAGH,IAAIC,gEAAJ,EAHG,EAIH,IAAIC,oEAAJ,EAJG,CAAP;AAMH;;AAED;;;;;;;;qCAK4B;AAAA;;AAAA,gBAAjBL,WAAiB,uEAAJ,EAAI;;AACxBA,wBAAWzE,OAAX,CAAmB;AAAA,uBAAa,MAAKuE,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B,CAAb;AAAA,aAAnB;AACA,mBAAO,KAAKT,KAAZ;AACH;;AAED;;;;;;;;iCAKSS,S,EAAW;AAChB,gBAAIA,qBAAqBE,4DAAzB,EAAwC;AACpC,qBAAKX,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B;AACA,uBAAO,IAAP;AACH;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;mCAMWA,S,EAAW;AAClB,iBAAKT,KAAL,CAAWY,MAAX,CAAkBH,UAAUC,IAA5B;AACA,mBAAO,IAAP;AACH;;;4BAEG9F,I,EAAM;AACN,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,uBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;AACD,mBAAO,IAAP;AACH;;;;;;AAIL,IAAMmG,iBAAkB,YAAY;AAChC,QAAIf,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAID,kBAAJ,EAAR;AACA,eAAOC,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARuB,EAAxB;;AAUeD,6EAAf,E;;;;;;;;;;;;;;;;;;;;;;;;ACvEA;AACA;AACA;;IAEqBR,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJU,0DAAUA,CAACC,IADP;AAEb;;;;gCAEOC,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOH,oEAAIA,CAACC,IAAL,EAAWC,MAAX,EAAmBC,OAAnB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BJ,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBF,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJY,0DAAUA,CAACK,OADP;AAEb;;;;gCAEOH,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOE,8DAAMA,CAACJ,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BN,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBD,kB;;;AACjB,kCAAc;AAAA;;AAAA,uIACJa,0DAAUA,CAACO,OADP;AAEb;;;;gCAEOL,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOI,8DAAMA,CAACN,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP2CV,4D;;AAA3BP,iF;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACFA;AACA;AACA;;IAEqBE,a;;;AACjB,6BAAc;AAAA;;AAAA,6HACJW,0DAAUA,CAACS,SADP;AAEb;;;;gCAEOP,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOM,gEAAQA,CAACR,IAAT,EAAeC,MAAf,EAAuBC,OAAvB,CAAP;AACH;;;;EAPsCV,4D;;AAAtBL,4E;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;;;;;;;;;;;;;;;;;ACDA;;;IAGqBK,a;AACjB,2BAAYD,IAAZ,EAAkB;AAAA;;AACd,aAAKkB,KAAL,GAAalB,IAAb;AACH;;;;kCAMS;AACN,kBAAM,IAAImB,KAAJ,CAAU,iCAAV,CAAN;AACH;;;4BANU;AACP,mBAAO,KAAKD,KAAZ;AACH;;;;;;AAPgBjB,4E;;;;;;;;;;;;ACHrB;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAASmB,IAAT,CAAeX,IAAf,EAAqBC,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAMnB,aAAa,EAAEyB,4DAAF,EAAYF,wDAAZ,EAAoBF,wDAApB,EAAnB;AACA,QAAMQ,aAAaC,+DAAgBA,CAACb,IAAjB,CAAnB;;AAEA,QAAI,CAACY,UAAL,EAAiB;AACb,cAAM,IAAIF,KAAJ,CAAU,kCAAV,CAAN;AACH;;AAED,WAAO3B,WAAW6B,UAAX,EAAuBZ,IAAvB,EAA6BC,MAA7B,EAAqCC,OAArC,CAAP;AACH;;AAEcS,mEAAf,E;;;;;;;;;;;;;;;;ACvBA;;AAEA;;;;;;;;;;;;;;;;;AAiBA,SAASP,MAAT,CAAgBU,GAAhB,EAAqBb,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAI,CAACrF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;AACD,QAAMM,gBAAgB;AAClBC,wBAAgB;AADE,KAAtB;AAGA,QAAMC,eAAejB,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAArB;AACAyG,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM5G,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;;AAEA,QAAIgI,UAAUJ,YAAd;AACA,QAAIhB,QAAQe,cAAZ,EAA4B;AACxB;AACA;AACAK,kBAAUR,IAAIS,MAAJ,CAAW,CAAX,EAAc,CAAd,EAAiB,CAAjB,CAAV;AACH;AACD;AACA,QAAMC,YAAYF,QAAQG,MAAR,CAAe,UAACC,GAAD,EAAMC,CAAN,EAASjI,CAAT;AAAA,eAC7BU,OAAOgH,MAAP,CAAcM,GAAd,sBAAsBC,CAAtB,EAA0BjI,CAA1B,EAD6B;AAAA,KAAf,EAEf,EAFe,CAAlB;;AAIAoH,QAAIxG,OAAJ,CAAY,UAACsH,MAAD,EAAY;AACpB,YAAMC,QAAQ,EAAd;AACAX,qBAAa5G,OAAb,CAAqB,UAACwH,WAAD,EAAiB;AAClC,gBAAMC,YAAYP,UAAUM,WAAV,CAAlB;AACAD,kBAAMrH,IAAN,CAAWoH,OAAOG,SAAP,CAAX;AACH,SAHD;AAIA,eAAOvH,sBAAQqH,KAAR,CAAP;AACH,KAPD;AAQA,WAAO,CAACX,YAAD,EAAe5H,OAAf,CAAP;AACH;;AAEc8G,qEAAf,E;;;;;;;;;;;;ACtDA;AAAA;AAAA;AAAA
;AACA;;AAEA;;;;;;;;;;;;;;;;;;;;;AAqBA,SAASE,MAAT,CAAiB0B,GAAjB,EAAsB/B,MAAtB,EAA8BC,OAA9B,EAAuC;AACnC,QAAMc,gBAAgB;AAClBC,wBAAgB,IADE;AAElBgB,wBAAgB;AAFE,KAAtB;AAIA/B,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM7H,MAAM6J,wDAAKA,CAAChC,QAAQ+B,cAAd,CAAZ;AACA,WAAO7B,wDAAMA,CAAC/H,IAAII,SAAJ,CAAcuJ,GAAd,CAAP,EAA2B/B,MAA3B,EAAmCC,OAAnC,CAAP;AACH;;AAEcI,qEAAf,E;;;;;;;;;;;;ACnCA;AAAA;AAAA;;AAEA;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BA,SAASE,QAAT,CAAmBM,GAAnB,EAAwBb,MAAxB,EAAgC;AAC5B,QAAI,CAACpF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;;AAED,QAAMyB,SAAS,EAAf;AACA,QAAIzI,IAAI,CAAR;AACA,QAAI0I,uBAAJ;AACA,QAAM9I,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;AACA,QAAM+I,mBAAmBpC,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAAzB;;AAEAqH,QAAIxG,OAAJ,CAAY,UAACgI,IAAD,EAAU;AAClB,YAAMV,SAAS,EAAf;AACAS,yBAAiB/H,OAAjB,CAAyB,UAAC6G,UAAD,EAAgB;AACrC,gBAAIA,cAAcgB,MAAlB,EAA0B;AACtBC,iCAAiBD,OAAOhB,UAAP,CAAjB;AACH,aAFD,MAEO;AACHgB,uBAAOhB,UAAP,IAAqBzH,GAArB;AACA0I,iCAAiB1I,IAAI,CAArB;AACH;AACDkI,mBAAOQ,cAAP,IAAyBE,KAAKnB,UAAL,CAAzB;AACH,SARD;AASA3G,8BAAQoH,MAAR;AACH,KAZD;;AAcA,WAAO,CAACxH,OAAOmI,IAAP,CAAYJ,MAAZ,CAAD,EAAsB7I,OAAtB,CAAP;AACH;;AAEckH,uEAAf,E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACzDA;;AAEA;AACA;AAYA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;;;;;;IAYMgC,S;;;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,yBAAsB;AAAA;;AAAA;;AAAA,0CAANC,IAAM;AAANA,gBAAM;AAAA;;AAAA,qJACTA,IADS;;AAGlB,cAAKC,cAAL,GAAsB,EAAtB;AAHkB;AAIrB;;AAED;;;;;;;;;;;;;;;AA0CA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAgCSxC,O,EAAS;AACd,gBAAMyC,aAAa;AACfC,uBAAO,KADQ;AAEfC,2BAAW,IAFI;AAGfC,yBAAS,KAHM;AAIfC,8BAAc,KAJC;AAKfC,sBAAM;AALS,aAAnB;AAOA9C,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;AACA,gBAAM0B,SAAS,KAAKqB,oBAAL,GAA4BrB,MAA3C;;AAEA,gBAAMsB,gBAAgBC,qDAAWA,CAACC,IAAZ,CAClB,IADkB,EAElB,KAAKH,oBAAL,GAA4BrB,MAFV,EAGlB,KAAKyB,WAHa,EAIlBnD,QAAQ6C,YAAR,GAAuBnB,OAAOpI,GAAP,CAAW;AAAA,uBAAK8J,EAAE7J,IAAF,EAAL;AAAA,aAAX,EAA0BI,
IAA1B,EAAvB,GAA0D,KAAK0J,cAJ7C,EAKlBrD,QAAQ8C,IALU,EAMlB;AACIQ,4BAAYtD,QAAQ0C,KAAR,KAAkB,QADlC;AAEIa,wBAAQ,CAAC,CAACvD,QAAQ4C;AAFtB,aANkB,CAAtB;;AAYA,gBAAI,CAAC5C,QAAQ2C,SAAb,EAAwB;AACpB,uBAAOK,aAAP;AACH;;AAzBa,2BA2BQhD,OA3BR;AAAA,gBA2BN2C,SA3BM,YA2BNA,SA3BM;AAAA,gBA4BN7C,IA5BM,GA4BiBkD,aA5BjB,CA4BNlD,IA5BM;AAAA,gBA4BAC,MA5BA,GA4BiBiD,aA5BjB,CA4BAjD,MA5BA;AAAA,gBA4BQyD,IA5BR,GA4BiBR,aA5BjB,CA4BQQ,IA5BR;;AA6Bd,gBAAMC,aAAa1D,OAAOzG,GAAP,CAAY;AAAA,uBAAKoK,EAAEnK,IAAP;AAAA,aAAZ,CAAnB;AACA,gBAAMoK,gBAAgBzJ,OAAOmI,IAAP,CAAYM,SAAZ,CAAtB;AACA,gBAAMiB,cAAcD,cAAcpC,MAAd,CAAqB,UAACC,GAAD,EAAMqC,IAAN,EAAe;AACpD,oBAAMC,MAAML,WAAWM,OAAX,CAAmBF,IAAnB,CAAZ;AACA,oBAAIC,QAAQ,CAAC,CAAb,EAAgB;AACZtC,wBAAIlH,IAAJ,CAAS,CAACwJ,GAAD,EAAMnB,UAAUkB,IAAV,CAAN,CAAT;AACH;AACD,uBAAOrC,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;;AAQA,gBAAIxB,QAAQ0C,KAAR,KAAkB,QAAtB,EAAgC;AAC5BkB,4BAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,wBAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,wBAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAlE,yBAAKmE,IAAL,EAAW7J,OAAX,CAAmB,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AACpCtE,6BAAKmE,IAAL,EAAWG,QAAX,IAAuBF,MAAMhB,IAAN,CACnBmB,SADmB,EAEnBF,KAFmB,EAGnBX,KAAKY,QAAL,CAHmB,EAInBrE,OAAOkE,IAAP,CAJmB,CAAvB;AAMH,qBAPD;AAQH,iBAZD;AAaH,aAdD,MAcO;AACHnE,qBAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AAC9BR,gCAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,4BAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,4BAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAG,8BAAMF,IAAN,IAAcC,MAAMhB,IAAN,CACVmB,SADU,EAEVF,MAAMF,IAAN,CAFU,EAGVT,KAAKY,QAAL,CAHU,EAIVrE,OAAOkE,IAAP,CAJU,CAAd;AAMH,qBAVD;AAWH,iBAZD;AAaH;;AAED,mBAAOjB,aAAP;AACH;;AAED;;;;;;;;kCAKW;AACP,gBAAMsB,aAAa,KAAKnB,WAAxB;AACA,gBAAMoB,MAAM,EAAZ;;AAEA,gBAAID,WAAW5J,MAAf,EAAuB;AACnB,oBAAM8J,WAAWF,WAAWG,KAAX,CAAiB,GAAjB,CAAjB;;AAEAD,yBAASpK,OAAT,CAAiB,UAAC+E,GAAD,EAAS;AAAA,yCACHA,IAAIsF,KAAJ,CAAU,GAAV,EAAenL,GAAf,CAAmBoL,MAAnB,CADG;AAAA;AAAA,wBACjBC,KADiB;AAAA,wBACVC,GADU;;AAGtBA,0BAAMA,QAAQP,SAAR,GAAoBO,GAApB,GAA0BD,KAAhC;AACAJ,wBAAIjK,IAAJ,+BAAYK,MAAMiK,MAAMD,KAAN,GAAc,CAApB,EAAuBE,IAAvB,GAA8BvL,GAA9B,CAAkC,UAACwL,CAAD,EAAIhB,GAAJ;AAA
A,+BAAYa,QAAQb,GAApB;AAAA,qBAAlC,CAAZ;AACH,iBALD;AAMH;;AAED,mBAAOS,GAAP;AACH;AACD;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAwBSQ,S,EAAwD;AAAA,gBAA7CC,QAA6C,uEAAlC,EAAkC;AAAA,gBAA9BC,MAA8B,uEAArB,EAAEC,WAAW,IAAb,EAAqB;;AAC7D,gBAAMC,qBAAmBJ,UAAUpL,IAAV,EAAzB;AACA,gBAAIyL,SAAS,CAAC,IAAD,EAAOL,SAAP,EAAkBC,QAAlB,CAAb;AACA,gBAAMK,eAAeC,mEAAWF,MAAX,CAArB;;AAEAG,8EAAkBA,CACd,IADJ,EAEIF,YAFJ,EAGI5H,yDAAcA,CAACG,OAHnB,EAII,EAAEmH,oBAAF,EAAaI,4BAAb,EAA4BK,gBAAgBC,4DAAYA,CAACD,cAAb,EAA5C,EAJJ,EAKIR,QALJ;;AAQA,gBAAIC,OAAOC,SAAX,EAAsB;AAClBG,6BAAaK,SAAb,CAAuB,IAAvB;AACH,aAFD,MAEO;AACHL,6BAAaK,SAAb,CAAuB,IAAvB;AACH;;AAED,mBAAOL,YAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BAmDMM,c,EAA+C;AAAA,gBAA/BV,MAA+B,uEAAtB,EAAEC,WAAW,KAAb,EAAsB;;AACjD,gBAAMU,UAAU,KAAKC,OAAL,CAAa;AACzBnD,uBAAO,KADkB;AAEzBI,sBAAM6C;AAFmB,aAAb,CAAhB;AAIA,gBAAM1D,SAAS2D,QAAQ7F,MAAR,CAAezG,GAAf,CAAmB;AAAA,uBAASqI,MAAMpI,IAAf;AAAA,aAAnB,CAAf;AACA,gBAAMuM,eAAe,CAAC7D,MAAD,EAASlF,MAAT,CAAgB6I,QAAQ9F,IAAxB,CAArB;;AAEA,gBAAMiG,WAAW,IAAI,KAAKC,WAAT,CAAqBF,YAArB,EAAmCF,QAAQ7F,MAA3C,EAAmD,EAAEW,YAAY,QAAd,EAAnD,CAAjB;;AAEA6E,8EAAkBA,CACd,IADJ,EAEIQ,QAFJ,EAGItI,yDAAcA,CAACO,IAHnB,EAIIiH,MAJJ,EAKIU,cALJ;;AAQA,gBAAIV,OAAOC,SAAX,EAAsB;AAClBa,yBAASL,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHK,yBAASL,SAAT,CAAmB,IAAnB;AACH;;AAED,mBAAOK,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;kCAqBW1G,I,EAAMW,O,EAAS;AACtBX,mBAAOA,QAAQ,KAAK4G,WAApB;AACAjG,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEa,gBAAgB,GAAlB,EAAlB,EAA2C/B,OAA3C,CAAV;;AAEA,gBAAM0B,SAAS,KAAKwE,aAAL,GAAqBxE,MAApC;AACA,gBAAMyE,UAAUzE,OAAOpI,GAAP,CAAW;AAAA,uBAAKO,EAAEuM,aAAF,EAAL;AAAA,aAAX,CAAhB;AACA,gBAAMC,YAAYF,QAAQ,CAAR,EAAWzL,MAA7B;AACA,gBAAI4L,uBAAJ;AACA,gBAAIC,eAAJ;AACA,gBAAIC,eAAJ;;AAEA,gBAAInH,SAASO,iDAAUA,CAACS,SAAxB,EAAmC;AAC/BiG,iCAAiB,EAAjB;AACA,qBAAKC,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,MAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,4BAAI4H,OAAO8E,MAAP,EAAejN,IAAf,EAAJ,IAA6B4M,QAAQK,MAAR,EAAgBD,MAAhB,
CAA7B;AACH;AACDD,mCAAehM,IAAf,CAAoBR,GAApB;AACH;AACJ,aATD,MASO,IAAIuF,SAASO,iDAAUA,CAACO,OAAxB,EAAiC;AACpCmG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,EAA0BI,IAA1B,CAA+BqG,QAAQ+B,cAAvC,CAAD,CAAjB;AACA,qBAAKwE,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,OAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,6BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAAIH,IAAJ,CAASqG,QAAQ+B,cAAjB,CAApB;AACH;AACDuE,iCAAiBA,eAAe3M,IAAf,CAAoB,IAApB,CAAjB;AACH,aAVM,MAUA,IAAI0F,SAASO,iDAAUA,CAACK,OAAxB,EAAiC;AACpCqG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,CAAD,CAAjB;AACA,qBAAKgN,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,QAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,8BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAApB;AACH;AACJ,aATM,MASA;AACH,sBAAM,IAAI0G,KAAJ,gBAAuBnB,IAAvB,uBAAN;AACH;;AAED,mBAAOiH,cAAP;AACH;;;iCAES3E,K,EAAO;AACb,gBAAM8E,YAAY9E,MAAMpI,IAAN,EAAlB;AACA,iBAAK8J,cAAL,UAA2BoD,SAA3B;AACA,gBAAMC,oBAAoB,KAAKC,kBAA/B;AACA,gBAAMC,qBAAqBF,kBAAkBG,mBAA7C;AACA,gBAAMT,gBAAgBzE,MAAMyE,aAAN,EAAtB;AACA,gBAAMR,UAAUjE,MAAMmF,YAAN,CAAmBhH,IAAnC;;AAEA,gBAAI,CAAC4G,kBAAkBK,SAAlB,GAA8BpF,MAAMpI,IAAN,EAA9B,CAAL,EAAkD;AAC9CmN,kCAAkBhF,MAAlB,CAAyBpH,IAAzB,CAA8BqH,KAA9B;AACAiF,mCAAmBxM,OAAnB,CAA2B,UAAC4M,GAAD,EAAMxN,CAAN,EAAY;AACnCwN,wBAAIrF,MAAMpI,IAAN,EAAJ,IAAoB,IAAI0N,8CAAJ,CAAUb,cAAc5M,CAAd,CAAV,EAA4BoM,QAAQpM,CAAR,CAA5B,EAAwCmI,KAAxC,CAApB;AACH,iBAFD;AAGH,aALD,MAKO;AACH,oBAAMuF,aAAaR,kBAAkBhF,MAAlB,CAAyByF,SAAzB,CAAmC;AAAA,2BAAaC,UAAU7N,IAAV,OAAqBkN,SAAlC;AAAA,iBAAnC,CAAnB;AACAS,8BAAc,CAAd,KAAoBR,kBAAkBhF,MAAlB,CAAyBwF,UAAzB,IAAuCvF,KAA3D;AACH;;AAED;AACA+E,8BAAkBW,gBAAlB,GAAqC,IAArC;AACAX,8BAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,8BAAkBa,cAAlB,GAAmC,IAAnC;;AAEA,iBAAKC,qBAAL,GAA6BC,qBAA7B;AACA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0CAoCmB1H,M,EAAQ
2H,U,EAAYzC,M,EAAQ;AAAA;;AAC3ClF,qBAAS4H,kEAAkBA,CAAC5H,MAAnB,CAAT;AACAkF,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEgE,WAAW,IAAb,EAAmB0C,YAAY,KAA/B,EAAlB,EAA0D3C,MAA1D,CAAT;;AAEA,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;AACA,gBAAMC,UAAUL,WAAW/K,KAAX,CAAiB,CAAjB,EAAoB+K,WAAWhN,MAAX,GAAoB,CAAxC,CAAhB;AACA,gBAAMsN,aAAaN,WAAWA,WAAWhN,MAAX,GAAoB,CAA/B,CAAnB;;AAEA,gBAAImN,aAAa9H,OAAOxG,IAApB,KAA6B,CAAC0L,OAAO2C,UAAzC,EAAqD;AACjD,sBAAM,IAAIpH,KAAJ,CAAaT,OAAOxG,IAApB,wCAAN;AACH;;AAED,gBAAM0O,kBAAkBF,QAAQzO,GAAR,CAAY,UAACqI,KAAD,EAAW;AAC3C,oBAAMuG,YAAYL,aAAalG,KAAb,CAAlB;AACA,oBAAI,CAACuG,SAAL,EAAgB;AACZ;AACA,0BAAM,IAAI1H,KAAJ,CAAamB,KAAb,kCAAN;AACH;AACD,uBAAOuG,UAAUC,KAAjB;AACH,aAPuB,CAAxB;;AASA,gBAAMC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;;AAEA,gBAAMmD,KAAKD,MAAMlC,aAAN,GAAsBxE,MAAjC;AACA,gBAAM4G,iBAAiBL,gBAAgB3O,GAAhB,CAAoB;AAAA,uBAAO+O,GAAGvE,GAAH,CAAP;AAAA,aAApB,CAAvB;;AAEA,gBAAIyE,cAAc,EAAlB;AACA,gBAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,uBAAM,OAAKC,YAAL,EAAN;AAAA,aAApB;;AAEA,gBAAMC,iBAAiB,EAAvB;AACAC,gFAAkBA,CAACP,MAAMjF,WAAzB,EAAsC,UAAC3J,CAAD,EAAO;AACzC,oBAAMoP,aAAaN,eAAehP,GAAf,CAAmB;AAAA,2BAASqI,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAT;AAAA,iBAAnB,CAAnB;AACAkP,+BAAelP,CAAf,IAAoBwO,+CAAcY,UAAd,UAA0BpP,CAA1B,EAA6BgP,aAA7B,EAA4CD,WAA5C,GAApB;AACH,aAHD;;AA9B2C,gCAkC3BM,mEAAYA,CAAC,CAACH,cAAD,CAAb,EAA+B,CAAC3I,MAAD,CAA/B,EAAyC,CAACA,OAAOxG,IAAR,CAAzC,CAlC2B;AAAA;AAAA,gBAkCpCoI,KAlCoC;;AAmC3CyG,kBAAMU,QAAN,CAAenH,KAAf;;AAEA4D,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACK,OAHnB,EAII,EAAEmH,QAAQlF,MAAV,EAAkB2B,QAAQqG,OAA1B,EAJJ,EAKIC,UALJ;;AAQA,mBAAOI,KAAP;AACH;;AAED;;;;;;;;;;;kCAQWW,W,EAA2D;AAAA,gBAA9C9D,MAA8C,uEAArC,EAAqC;AAAA,gBAAjC+D,cAAiC;AAAA,gBAAjBC,UAAiB,uEAAJ,EAAI;;AAClE,gBAAMC,kBAAkBjE,OAAOiE,eAA/B;AACA,gBAAMC,sBAAsBlE,OAAOmE,QAAnC;AACA,gBAAMC,UAAUpE,OAAOoE,OAAvB;AACA,gBAAMC,YAAYC,gEAAgBA,CAAC,IAAjB,CAAlB;AACA,gBAAMC,uBAAuBF,UAAUG,qBAAvC;AACA,gBAAMC,mBAAmBC,mEAAmBA,CAAC,IAApB,CAAzB;AACA,gBAAMC,aAAa;AACfC,8BAAcH,gBADC;AAEfI,uBAAOR;AAFQ,aAAnB;;AAKAN,8BAAkBe,kEAAkBA,CAACP,oBAAn
B,EAAyCvE,MAAzC,EAAiD,IAAjD,CAAlB;AACA+E,oFAAwBA,CAACjB,WAAzB,EAAsCa,UAAtC,EAAkD,EAAEJ,0CAAF,EAAwBJ,UAAUD,mBAAlC,EAAlD,EACIjP,OAAOgH,MAAP,CAAc;AACVmI;AADU,aAAd,EAEGpE,MAFH,CADJ;;AAKA,gBAAIiE,eAAJ,EAAqB;AACjBe,yFAAyBA,CAACT,oBAA1B,EAAgDI,UAAhD,EAA4D;AACxD3E,kCADwD;AAExDgE;AAFwD,iBAA5D,EAGG,IAHH;AAIH;;AAED,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;2BAOIiB,S,EAAWC,Q,EAAU;AACrB,oBAAQD,SAAR;AACA,qBAAK3M,sDAAL;AACI,yBAAKiF,cAAL,CAAoBlI,IAApB,CAAyB6P,QAAzB;AACA;AAHJ;AAKA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;oCAMaD,S,EAAW;AACpB,oBAAQA,SAAR;AACA,qBAAK3M,sDAAL;AACI,yBAAKiF,cAAL,GAAsB,EAAtB;AACA;;AAHJ;AAMA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;0CAOmB4H,S,EAAWf,O,EAAS;AAAA;;AACnC,gBAAIgB,gBAAgB,KAAK7H,cAAzB;AACA6H,0BAAcjQ,OAAd,CAAsB;AAAA,uBAAMkQ,GAAGpH,IAAH,CAAQ,MAAR,EAAckH,SAAd,EAAyBf,OAAzB,CAAN;AAAA,aAAtB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BA2CKkB,gB,EAAkBtF,M,EAAQ;AAC3B,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEA,gBAAI,CAACD,aAAa0C,gBAAb,CAAL,EAAqC;AACjC,sBAAM,IAAI/J,KAAJ,YAAmB+J,gBAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAevF,OAAO1L,IAAP,IAAkBgR,gBAAlB,YAArB;;AAEA,gBAAI1C,aAAa2C,YAAb,CAAJ,EAAgC;AAC5B,sBAAM,IAAIhK,KAAJ,YAAmBgK,YAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAe,KAAKvE,aAAL,GAAqBa,SAArB,GAAiCwD,gBAAjC,CAArB;;AAb2B,wCAcEG,sFAAqBA,CAACD,YAAtB,EAAoC,KAAKtH,WAAzC,EAAsD8B,MAAtD,CAdF;AAAA,gBAcnB0F,UAdmB,yBAcnBA,UAdmB;AAAA,gBAcPC,IAdO,yBAcPA,IAdO;;AAgB3B,gBAAMC,WAAWhC,mEAAYA,CAAC,CAAC8B,UAAD,CAAb,EAA2B,CACxC;AACIpR,sBAAMiR,YADV;AAEInL,sBAAMyL,gDAASA,CAACC,SAFpB;AAGIC,yBAASC,uDAAgBA,CAACC,MAH9B;AAIIN;AAJJ,aADwC,CAA3B,EAMT,CAACJ,YAAD,CANS,EAMO,CANP,CAAjB;;AAQA,gBAAMpC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;AACAkD,kBAAMU,QAAN,CAAe+B,QAAf;;AAEAtF,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACM,GAHnB,EAIK,EAAEwM,kCAAF,EAAoBtF,cAApB,EAA4BuF,0BAA5B,EAJL,EAKK,IALL;;AAQA,mBAAOpC,KAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;uCAuBgB;AACZ,gBAAMtI,OAAO,KAAKqL,SAAL,CAAevL,iDAAUA,CAACS,SAA1B,CAAb;AACA,gBAAMN,SAAS,KAAKqL,SAAL,EAAf;;AAEA,mBAAO,IAAI9I,SAAJ,CAAcxC,IAAd,EAAoBC,MAApB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;mCA0CYsL,Y,EAAcC,S,EAAWrG,M,EAAQ;AACzC,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEAuD,yBAAajR,OAAb,CAAqB,UAACqM,SAAD,EAAe;AAChC,oBAAI,CAACoB,aAAapB,SAAb,CAAL,EAA8B;AAC1B,0BAAM,IAAIjG,KAAJ,YAAmBiG,SAAnB,mCAAN;AACH;AACJ,aAJD;;AAMA,gBAAM8E,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;;AAKAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;;AAEA,mBAAO0G,+DAAeA,CAAC,IAAhB,EAAsBN,YAAtB,EAAoCC,SAApC,EAA+CrG,MAA/C,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;wCAoC6D;AAAA,gBAA9C2G,YAA8C,uEAA/B,EAA+B;AAAA,gBAA3BC,YAA2B,uEAAZ,EAAY;AAAA,gBAAR5G,MAAQ;;AACzD,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIA,gBAAM4G,cAAc,KAAKhE,eAAL,EAApB;AACA,gBAAMiE,YAAY7R,OAAOmI,IAAP,CAAYyJ,WAAZ,CAAlB;AACA,gBAAME,0BAA0B,CAAC,CAACH,YAAD,CAAD,CAAhC;;AAEA5G,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACA2G,2BAAeA,aAAalR,MAAb,GAAsBkR,YAAtB,GAAqC,CAAC,EAAD,CAApD;;AAGAA,yBAAaxR,OAAb,CAAqB,UAAC6R,QAAD,EAAWzS,CAAX,EAAiB;AAClCwS,wCAAwBxS,CAAxB,IAA6B0S,sEAAsBA,8BAC3CD,QADqB,sBACRJ,YADQ,IAEzBE,SAFyB,EAGzBD,WAHyB,CAA7B;AAIH,aALD;;AAOA,mBAAOK,gEAAgBA,CAAC,IAAjB,EAAuBH,uBAAvB,EAAgD/G,MAAhD,EAAwD8G,SAAxD,CAAP;AACH;;;;;AAhvBD;;;;;;;;;;;mDAWmC9G,M,EAAQ;AACvC,mBAAOmH,4DAAiBA,CAACC,gBAAlB,CAAmCpH,MAAnC,CAAP;AACH;;;4BA/BsB;AACnB,mBAAOQ,4DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO/F,0DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO4M,sDAAP;AACH;;;;EApEmBC,iD;;AA2zBTjK,wEAAf,E;;;;;;;;;;;;ACr2BA;AAAA;AAAA;;AAEe;AACX5B,gBAAYd,iDAAUA,CAACC;AADZ,CAAf,E;;;;;;;;;;;;ACFA;AAAA;;;;;;;AAOA,IAAMD,aAAa;AACfS,aAAW,UADI;AAEfF,WAAS,QAFM;AAGfF,WAAS,QAHM;AAIfJ,QAAM;AAJS,CAAnB;;AAOeD,yEAAf,E;;;;;;;;;;;;ACdA;AAAA;;;;;;AAMA,IAAMqL,mBAAmB;AACrBuB,eAAa,aADQ;AAErBC,YAAU,UAFW;AAGrBvB,UAAQ;AAHa,CAAzB;;AAMeD,+EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;;AAOA,IAAMH,YAAY;AACd4B,WAAS,SADK;AAEd3B,aAAW;AAFG,CAAlB;;AAKeD,wEAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMW,gBAAgB;AAClBC,UAAQ,QADU;AAElBiB,WAAS,SAFS;AAGlBC,OAAK;AAHa,CAAtB;;AAMenB,4EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMoB
,qBAAqB;AACvBC,SAAK,KADkB;AAEvBC,SAAK,KAFkB;AAGvBC,SAAK,KAHkB;AAIvBC,SAAK,KAJkB;AAKvBC,WAAO,OALgB;AAMvBC,UAAM,MANiB;AAOvBC,WAAO,OAPgB;AAQvBC,SAAK;AARkB,CAA3B;;AAWeR,iFAAf,E;;;;;;;;;;;;ACjBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;;;;;;;AASA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;ACbA;AAAA;;;;;;AAMA,IAAMS,iBAAiB;AACnBC,cAAY;AADO,CAAvB;;AAIeD,6EAAf,E;;;;;;;;;;;;ACVA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAiBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,IAAME,YAAY;AACdC,8DADc;AAEdC,sDAFc;AAGdC,4DAHc;AAIdC,8DAJc;AAKdtI,8DALc;AAMduI,kFANc;AAOd/K,wDAPc;AAQdgL,wEARc;AASdC,oEATc;AAUdC,sEAVc;AAWdC,0EAXc;AAYdC,4EAZc;AAadC,0EAbc;AAcdC,0DAdc;AAedzF,oFAAkBA;AAfJ,CAAlB;;AAkBA,IAAM0F,UAAUC,0CAAGA,CAACD,OAApB;AACAnU,OAAOgH,MAAP,CAAcoB,kDAAd,EAAyB;AACrBkL,wBADqB;AAErBe,8CAFqB;AAGrB9Q,6EAHqB;AAIrB+Q,+EAJqB;AAKrB5O,qEALqB;AAMrB6L,2EANqB;AAOrBW,mFAPqB;AAQrBiC,oBARqB;AASrB/O,2EATqB;AAUrBmP,uDAAaA;AAVQ,CAAzB,EAWGC,mCAXH;;AAaepM,iHAAf,E;;;;;;;;;;;;AC3DA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;AAEA;;;;;;;AAOA,SAASqM,eAAT,CAAyB7O,IAAzB,EAA+BC,MAA/B,EAAuC;AACnCD,WAAOA,QAAQ,EAAf;;AAEA,QAAIwM,qDAAaA,CAAC9M,GAAd,CAAkBO,OAAOiL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC7M,GAAd,CAAkBM,OAAOiL,OAAzB,EACU4D,OADV,CAEUnI,SAFV,CAEoB1G,OAAOxG,IAF3B,EAGUwG,MAHV,CAGiBA,MAHjB,EAIUD,IAJV,CAIeA,IAJf,EAKUwE,UALV,SAK0BxE,KAAKpF,MAAL,GAAc,CALxC,GAMUmU,KANV,EAAP;AAOH;AACD,WAAOvC,qDAAaA,CACH7M,GADV,CACcM,OAAOV,IAAP,KAAgByL,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGUnI,SAHV,CAGoB1G,OAAOxG,IAH3B,EAIUwG,MAJV,CAIiBA,MAJjB,EAKUD,IALV,CAKeA,IALf,EAMUwE,UANV,SAM0BxE,KAAKpF,MAAL,GAAc,CANxC,GAOUmU,KAPV,EAAP;AAQH;;AAGD;;;;;;;AAOO,SAASC,0BAAT,CAAoChI,YAApC,EAAkDxC,UAAlD,EAA8D;AAAA,QACzDvE,MADyD,GAC9C+G,YAD8C,CACzD/G,MADyD;;;AAGjE,QAAIuM,qDAAaA,CAAC9M,GAAd,CAAkBO,OAAOiL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC7M,GAAd,CAAkBM,OAAOiL,OAAzB,EACU4D,OADV,CAEU9H,YAFV,CAEuBA,YAFvB,EAGUxC,UAHV,CAGqBA,UAHrB,EAIUuK,KAJV,EAAP;
AAKH;AACD,WAAOvC,qDAAaA,CACH7M,GADV,CACcM,OAAOV,IAAP,KAAgByL,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGU9H,YAHV,CAGuBA,YAHvB,EAIUxC,UAJV,CAIqBA,UAJrB,EAKUuK,KALV,EAAP;AAMH;;AAED;;;;;;;;AAQO,SAAShG,YAAT,CAAsBkG,UAAtB,EAAkChP,MAAlC,EAA0CqB,OAA1C,EAAmD;AACtD,QAAM4N,aAAa,EAAnB;;AAEA,QAAI,EAAE5N,WAAWA,QAAQ1G,MAArB,CAAJ,EAAkC;AAC9B0G,kBAAUrB,OAAOzG,GAAP,CAAW;AAAA,mBAAQ8I,KAAK7I,IAAb;AAAA,SAAX,CAAV;AACH;;AAED6H,YAAQhH,OAAR,CAAgB,UAAC6H,MAAD,EAASzI,CAAT,EAAe;AAC3BwV,mBAAW/M,MAAX,IAAqBzI,CAArB;AACH,KAFD;;AAIA,WAAOuG,OAAOzG,GAAP,CAAW;AAAA,eAAQqV,gBAAgBI,WAAWC,WAAW5M,KAAK7I,IAAhB,CAAX,CAAhB,EAAmD6I,IAAnD,CAAR;AAAA,KAAX,CAAP;AACH,C;;;;;;;;;;;;AC9ED;AAAA;AAAA;AAAA;AACA;;AAEA,IAAM6M,aAAa;AACfnP,UAAM,EADS;;AAGfoP,mBAHe,2BAGEC,QAHF,EAGY5V,IAHZ,EAGkB;AAC7B,YAAM6V,SAAS7V,QAAQ8V,0DAAWA,EAAlC;;AAEA,aAAKvP,IAAL,CAAUsP,MAAV,IAAoB;AAChB7V,kBAAM6V,MADU;AAEhB1N,oBAAQyN,QAFQ;;AAIhBpI,qBAJgB,uBAIH;AACT,oBAAIA,YAAY,KAAKM,gBAArB;;AAEA,oBAAI,CAACN,SAAL,EAAgB;AACZA,gCAAY,KAAKM,gBAAL,GAAwB,EAApC;AACA,yBAAK3F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3BoF,kCAAUpF,MAAMpI,IAAN,EAAV,IAA0BoI,KAA1B;AACH,qBAFD;AAGH;AACD,uBAAOoF,SAAP;AACH,aAde;AAehBuI,sBAfgB,wBAeF;AACV,oBAAIC,gBAAgB,KAAKhI,cAAzB;;AAEA,oBAAI,CAACgI,aAAL,EAAoB;AAChBA,oCAAgB,KAAKhI,cAAL,GAAsB,EAAtC;AACA,yBAAK7F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwByL,gDAASA,CAAC4B,OAAtC,EAA+C;AAC3C6C,0CAAc5N,MAAMpI,IAAN,EAAd,IAA8BoI,KAA9B;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO4N,aAAP;AACH,aA3Be;AA4BhBC,wBA5BgB,0BA4BA;AACZ,oBAAIC,kBAAkB,KAAKnI,gBAA3B;;AAEA,oBAAI,CAAC,KAAKA,gBAAV,EAA4B;AACxBmI,sCAAkB,KAAKnI,gBAAL,GAAwB,EAA1C;AACA,yBAAK5F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwByL,gDAASA,CAACC,SAAtC,EAAiD;AAC7C0E,4CAAgB9N,MAAMpI,IAAN,EAAhB,IAAgCoI,KAAhC;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO8N,eAAP;AACH;AAxCe,SAApB;AA0CA,eAAO,KAAK3P,IAAL,CAAUsP,MAAV,CAAP;AACH;AAjDc,CAAnB;;AAoDeH,yEAAf,E;;;;;;;;;;;;;;;;;;;;;;;ACvDA;AACA;;AAEA;;;;;;;;IAOqBS,M;;;
;;;;;;;;;AACjB;;;;;;;8CAOuB;AACnB,gBAAMC,UAAU,KAAK7I,YAAL,CAAkB/G,MAAlB,CAAyB6K,IAAzC;AACA,mBAAO,CAAC+E,QAAQ,CAAR,CAAD,EAAaA,QAAQA,QAAQjV,MAAR,GAAiB,CAAzB,CAAb,CAAP;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKoM,YAAL,CAAkB/G,MAAlB,CAAyB6K,IAAhC;AACH;;;iCAEe;AACZ,mBAAO,IAAIgF,8DAAJ,EAAP;AACH;;;;EAzB+BC,kD;;AAAfH,qE;;;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;AACA;AACA;AACA;;;;;;;;IAOqBI,W;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO7E,uDAAgBA,CAACuB,WAAxB;AACH;;AAED;;;;;;;;;;8CAOuB;AAAA;;AACnB,gBAAMuD,OAAO,IAAIC,GAAJ,EAAb;AACA,gBAAMC,SAAS,EAAf;;AAEA;AACAtH,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI,CAACuW,KAAKvQ,GAAL,CAAS2E,KAAT,CAAL,EAAsB;AAClB4L,yBAAKG,GAAL,CAAS/L,KAAT;AACA8L,2BAAO3V,IAAP,CAAY6J,KAAZ;AACH;AACJ,aAND;AAOA,mBAAO8L,MAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIE,mEAAJ,EAAP;AACH;;;;EApCoCN,kD;;AAApBC,0E;;;;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBM,U;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO9C,qDAAcA,CAACC,UAAtB;AACH;;AAED;;;;;;;;;;8CAOuB;AAAA;;AACnB,gBAAI8C,MAAM3L,OAAO4L,iBAAjB;AACA,gBAAIC,MAAM7L,OAAO8L,iBAAjB;;AAEA;AACA7H,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI2K,iBAAiBiI,4DAArB,EAAwC;AACpC;AACH;;AAED,oBAAIjI,QAAQkM,GAAZ,EAAiB;AACbA,0BAAMlM,KAAN;AACH;AACD,oBAAIA,QAAQoM,GAAZ,EAAiB;AACbA,0BAAMpM,KAAN;AACH;AACJ,aAZD;;AAcA,mBAAO,CAACkM,GAAD,EAAME,GAAN,CAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIE,kEAAJ,EAAP;AACH;;;;EA3CmCC,gD;;AAAnBN,yE;;;;;;;;;;;;;;;;;;;;;;ACbrB;;AAEA;;;;;;;;IAOqBP,S;;;;;;;;;;;;AACjB;;;;;;;iCAOU;AACN,gBAAI,CAAC,KAAKc,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAED;;;;;;;;;8CAMuB;AACnB,kBAAM,IAAInQ,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAEA;;;;;;;;;;wCAOgB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAlCkC+Q,8C;;AAAlBhB,wE;;;;;;;;;;;;;;;;;;;;;;ACTrB;AACA;AACA;AACA;AACA;;IAGMiB,iB;AACF,iCAAc;AAAA;;AACV,aAAKC,UAAL,GAAkB,IAAInS,GAAJ,EAAlB;AACH;;;;0CAEiBoM,O,EAASgG,S,EAAW;AACl
C,iBAAKD,UAAL,CAAgB5R,GAAhB,CAAoB6L,OAApB,EAA6BgG,SAA7B;AACA,mBAAO,IAAP;AACH;;;4BAEG3R,I,EAAM;AACN,mBAAO,KAAK0R,UAAL,CAAgBvR,GAAhB,CAAoBH,IAApB,CAAP;AACH;;;4BAEGA,I,EAAM;AACN,mBAAO,KAAK0R,UAAL,CAAgBtR,GAAhB,CAAoBJ,IAApB,CAAP;AACH;;;;;;AAGL,IAAM4R,wBAAwB,SAAxBA,qBAAwB,CAACtS,KAAD,EAAW;AACrCA,UACiBuS,iBADjB,CACmCjG,uDAAgBA,CAACuB,WADpD,EACiEsD,oDADjE,EAEiBoB,iBAFjB,CAEmCjG,uDAAgBA,CAACwB,QAFpD,EAE8D0E,iDAF9D,EAGiBD,iBAHjB,CAGmCjG,uDAAgBA,CAACC,MAHpD,EAG4DwE,+CAH5D,EAIiBwB,iBAJjB,CAImC5D,qDAAcA,CAACC,UAJlD,EAI8D6C,mDAJ9D;AAKH,CAND;;AAQA,IAAM9D,gBAAiB,YAAY;AAC/B,QAAI3N,QAAQ,IAAZ;AACA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAImS,iBAAJ,EAAR;AACAG,8BAAsBtS,KAAtB;AACA,eAAOA,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARsB,EAAvB;;AAUe2M,4EAAf,E;;;;;;;;;;;;;;;;;;;AC5CA;AACA;;AAEA;;;;;;;;;;;;;;;;;;;;IAmBqBuE,K;AACjB;;;;;;;AAOA,mBAAa/J,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AACnC,aAAKwC,YAAL,GAAoBA,YAApB;AACA,aAAKxC,UAAL,GAAkBA,UAAlB;AACH;;;;;;AAMD;;;;;;iCAMU;AACN,kBAAM,IAAI9D,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKsG,YAAL,CAAkB/G,MAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAK+G,YAAL,CAAkBvN,IAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKuN,YAAL,CAAkB/G,MAAlB,CAAyBV,IAAhC;AACH;;AAED;;;;;;;;;kCAMW;AACP,mBAAO,KAAKyH,YAAL,CAAkB/G,MAAlB,CAAyBiL,OAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKlE,YAAL,CAAkB/G,MAAlB,CAAyBqR,WAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKtK,YAAL,CAAkB/G,MAAlB,CAAyBsR,WAAzB,IAAwC,KAAKvK,YAAL,CAAkB/G,MAAlB,CAAyBxG,IAAxE;AACH;;AAED;;;;;;;;;+BAMQ;AAAA;;AACJ,gBAAMuG,OAAO,EAAb;AACA6I,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvCsG,qBAAKxF,IAAL,CAAU,MAAKwM,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAV;AACH,aAFD;AAGA,mBAAOsG,IAAP;AACH;;AAED;;;;;;;;;wCAMiB;AACb,kBAAM,IAAIU,KAAJ,CAAU,qBAAV,CAAN;AACH;;;iCAhGe;AACZ,kBAAM,IAAIA,KAAJ,CAAU,qBAAV,CAAN;AACH;;;4BAgGoB;AACjB,gBAAM8Q,UAAU;AACZC,yBAAS,EADG;AAEZC,0BAAU,IAFE;AAGZ/K,yBAHY,qBAGFlN,IAHE,EAGI;AACZ,yBAAKgY,OAAL,CAAahY,IAAb,GAAoBA,IAApB;AACA,2BAAO,IAAP;AACH,iBANW;AAOZwG,sBAPY,kBAOLA,OAPK,EAOG;AACX,yBAAKwR,OAAL,CA
AaxR,MAAb,GAAsBA,OAAtB;AACA,2BAAO,IAAP;AACH,iBAVW;AAWZD,oBAXY,gBAWPA,KAXO,EAWD;AACP,yBAAKyR,OAAL,CAAazR,IAAb,GAAoBA,KAApB;AACA,2BAAO,IAAP;AACH,iBAdW;AAeZgH,4BAfY,wBAeCA,aAfD,EAee;AACvB,yBAAKyK,OAAL,CAAazK,YAAb,GAA4BA,aAA5B;AACA,2BAAO,IAAP;AACH,iBAlBW;AAmBZxC,0BAnBY,sBAmBDA,WAnBC,EAmBW;AACnB,yBAAKiN,OAAL,CAAajN,UAAb,GAA0BA,WAA1B;AACA,2BAAO,IAAP;AACH,iBAtBW;AAuBZuK,qBAvBY,mBAuBJ;AACJ,wBAAI/H,eAAe,IAAnB;AACA,wBAAI,KAAKyK,OAAL,CAAazK,YAAb,YAAqC2K,sDAAzC,EAAuD;AACnD3K,uCAAe,KAAKyK,OAAL,CAAazK,YAA5B;AACH,qBAFD,MAEO,IAAI,KAAKyK,OAAL,CAAaxR,MAAb,IAAuB,KAAKwR,OAAL,CAAazR,IAAxC,EAA8C;AACjDgH,uCAAe,IAAI2K,sDAAJ,CAAiB,KAAKF,OAAL,CAAahY,IAA9B,EACK,KAAKgY,OAAL,CAAazR,IADlB,EAEK,KAAKyR,OAAL,CAAaxR,MAFlB,EAGK,KAAKyR,QAAL,CAAcE,MAAd,EAHL,CAAf;AAIH,qBALM,MAMF;AACD,8BAAM,IAAIlR,KAAJ,CAAU,0BAAV,CAAN;AACH;AACD,2BAAO,IAAI,KAAKgR,QAAT,CAAkB1K,YAAlB,EAAgC,KAAKyK,OAAL,CAAajN,UAA7C,CAAP;AACH;AArCW,aAAhB;AAuCA,mBAAOgN,OAAP;AACH;;;;;;AAxJgBT,oE;;;;;;;;;;;;ACtBrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACHA;AACA;AACA;;AAEA;;;;;;;;IAOqBH,O;;;;;;;;;;;;AACnB;;;;;;;iCAOY;AACN,gBAAI,CAAC,KAAKC,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAEH;;;;;;;;;+BAMU;AACJ,mBAAO,KAAK7J,YAAL,CAAkB/G,MAAlB,CAAyB4R,IAAhC;AACH;;AAEH;;;;;;;;;mCAMc;AACR,mBAAO,KAAK7K,YAAL,CAAkB/G,MAAlB,CAAyB6R,QAAzB,IAAqCC,8EAA5C;AACH;;AAEH;;;;;;;;;uCAMkB;AAAA,gBACJC,YADI,GACa,KAAKhL,YAAL,CAAkB/G,MAD/B,CACJ+R,YADI;;AAEZ,mBAAOA,wBAAwBzY,QAAxB,GAAmCyY,YAAnC,GAAkDC,mDAAzD;AACH;;AAEH;;;;;;;;;8CAMyB;AACnB,kBAAM,IAAIvR,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;;wCAOiB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAjEgC+Q,8C;;AAAhBH,sE;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;;AAEA;;;;;;;;IAOqBd,Y;;;;;;;;;;;;AACnB;;;;;;;8BAOSoC,G,EAAK;AACR,gBAAMC,QAAQ,yDAAd;AACAD,kBAAME,OAAOF,GAAP,CAAN;AACA,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIK,UAAUL,IAAIM,KAAJ,CAAUL,KAAV,CAAd;AACAE,yBAASE,UAAa3N,OAAO6N,UA
AP,CAAkBF,QAAQ,CAAR,CAAlB,CAAb,SAA8C3N,OAAO6N,UAAP,CAAkBF,QAAQ,CAAR,CAAlB,CAA9C,GACUjG,4DAAiBA,CAACoG,EADrC;AAEH,aAJD,MAIO;AACHL,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EArBqCO,qD;;AAArB9C,2E;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBO,iB;;;;;;;;;;;;AACnB;;;;;;;8BAOS6B,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnCG,yBAASD,OAAOF,GAAP,EAAYpa,IAAZ,EAAT;AACH,aAFD,MAEO;AACHua,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAjB0CO,qD;;AAA1BvC,gF;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBM,gB;;;;;;;;;;;;AACnB;;;;;;;8BAOSuB,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIW,YAAYJ,WAAWP,GAAX,EAAgB,EAAhB,CAAhB;AACAG,yBAASzN,OAAO3M,KAAP,CAAa4a,SAAb,IAA0BvG,4DAAiBA,CAACoG,EAA5C,GAAiDG,SAA1D;AACH,aAHD,MAGO;AACHR,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAlByCO,qD;;AAAzBjC,+E;;;;;;;;;;;;;;;;;ACVrB;;;;;;IAMqBiC,W;;;;;;;;AACjB;;;;;;4BAMS;AACL,YAAM,IAAIlS,KAAJ,CAAU,qBAAV,CAAN;AACH;;;;;;AATgBkS,0E;;;;;;;;;;;;;;;;;;;;;;;;ACNrB;AACA;AACA;;AAEA;;;;;;;;IAOqBE,c;;;;;;;;;;;;;AAEjB;;;;;;;8BAOOZ,G,QAAiB;AAAA,gBAAVvZ,MAAU,QAAVA,MAAU;;AACpB,gBAAI0Z,eAAJ;AACA;AACA,gBAAI,CAAC,KAAKU,IAAV,EAAgB;AACZ,qBAAKA,IAAL,GAAY,IAAIrE,wDAAJ,CAAsB/V,MAAtB,CAAZ;AACH;AACD,gBAAI,CAAC2T,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIc,aAAa,KAAKD,IAAL,CAAUE,aAAV,CAAwBf,GAAxB,CAAjB;AACAG,yBAASW,aAAaA,WAAWE,OAAX,EAAb,GAAoC5G,4DAAiBA,CAACoG,EAA/D;AACH,aAHD,MAGO;AACHL,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAtBuCO,qD;;AAAvBE,6E;;;;;;;;;;;;;;;;;ACXrB;;;;;;;;IAQqBnB,Y;AACjB;;;;;;;;;AASA,wBAAalY,IAAb,EAAmBuG,IAAnB,EAAyBC,MAAzB,EAAiC2R,MAAjC,EAAyC;AAAA;;AACrC,SAAKnY,IAAL,GAAYA,IAAZ;AACA,SAAKwG,MAAL,GAAcA,MAAd;AACA,SAAK2R,MAAL,GAAcA,MAAd;AACA,SAAK5R,IAAL,GAAY,KAAKmT,SAAL,CAAenT,IAAf,CAAZ;AACH;;AAED;;;;;;;;;;;8BAOWA,I,EAAM;AAAA;;AACb,aAAOA,KAAKxG,GAAL,CAAS
;AAAA,eAAS,MAAKoY,MAAL,CAAYrZ,KAAZ,CAAkB8L,KAAlB,EAAyB,EAAE1L,QAAQ,MAAKsH,MAAL,CAAYtH,MAAtB,EAAzB,CAAT;AAAA,OAAT,CAAP;AACH;;;;;;AA1BgBgZ,2E;;;;;;;;;;;;;;;;;;;;;;;;;;ACRrB;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBN,Q;;;AAChB;;;;;;;AAOD,sBAAarK,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AAAA,wHAC7BwC,YAD6B,EACfxC,UADe;;AAGnC,cAAK4O,cAAL,GAAsB,IAAtB;AAHmC;AAItC;;AAEA;;;;;;;;;;;8CAOsB;AAAA;;AACnB,gBAAMnD,OAAO,IAAIC,GAAJ,EAAb;AACA,gBAAMC,SAAS,EAAf;;AAEA;AACA;AACAtH,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI,CAACuW,KAAKvQ,GAAL,CAAS2E,KAAT,CAAL,EAAsB;AAClB4L,yBAAKG,GAAL,CAAS/L,KAAT;AACA8L,2BAAO3V,IAAP,CAAY6J,KAAZ;AACH;AACJ,aAND;;AAQA,mBAAO8L,MAAP;AACH;;AAGD;;;;;;;;;uDAMgC;AAC5B,gBAAI,KAAKiD,cAAT,EAAyB;AACrB,uBAAO,KAAKA,cAAZ;AACH;;AAED,gBAAMC,aAAa,KAAKrT,IAAL,GAAYsT,MAAZ,CAAmB;AAAA,uBAAQ,EAAEhR,gBAAgBgK,4DAAlB,CAAR;AAAA,aAAnB,EAAiEtJ,IAAjE,CAAsE,UAACuQ,CAAD,EAAIC,CAAJ;AAAA,uBAAUD,IAAIC,CAAd;AAAA,aAAtE,CAAnB;AACA,gBAAMC,QAAQJ,WAAWzY,MAAzB;AACA,gBAAI8Y,UAAU9O,OAAO4L,iBAArB;AACA,gBAAImD,kBAAJ;AACA,gBAAIC,kBAAJ;AACA,gBAAIC,iBAAiB,CAArB;;AAEA,iBAAK,IAAIna,IAAI,CAAb,EAAgBA,IAAI+Z,KAApB,EAA2B/Z,GAA3B,EAAgC;AAC5Bia,4BAAYN,WAAW3Z,IAAI,CAAf,CAAZ;AACAka,4BAAYP,WAAW3Z,CAAX,CAAZ;;AAEA,oBAAIka,cAAcD,SAAlB,EAA6B;AACzB;AACH;;AAEDD,0BAAUI,KAAKvD,GAAL,CAASmD,OAAT,EAAkBE,YAAYP,WAAW3Z,IAAI,CAAf,CAA9B,CAAV;AACAma;AACH;;AAED,gBAAI,CAACA,cAAL,EAAqB;AACjBH,0BAAU,IAAV;AACH;AACD,iBAAKN,cAAL,GAAsBM,OAAtB;;AAEA,mBAAO,KAAKN,cAAZ;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKpM,YAAL,CAAkB/G,MAAlB,CAAyBtH,MAAhC;AACH;;AAED;;;;;;;;;;wCAOiB;AAAA;;AACb,gBAAMqH,OAAO,EAAb;AACA,gBAAMY,aAAa,KAAKjI,MAAL,EAAnB;;AAEAkQ,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA;AACA,oBAAI4S,4DAAiBA,CAACgG,SAAlB,CAA4BjO,KAA5B,KAAuC,CAACzD,UAAD,IAAegE,OAAOmP,QAAP,CAAgB1P,KAAhB,CAA1D,EAAmF;AAC/E;AACA,wBAAM2P,cAAc1H,4DAAiBA,CAACqG,cAAlB,CAAiCtO,KAAjC,KAA2CA,KAA/D;AACArE,yBAAKxF,IAAL,CAAUwZ,WAAV;AACH,iBA
JD,MAIO;AACHhU,yBAAKxF,IAAL,CAAUkU,wDAAiBA,CAACuF,QAAlB,CAA2B5P,KAA3B,EAAkCzD,UAAlC,CAAV;AACH;AACJ,aAVD;AAWA,mBAAOZ,IAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAI8S,gEAAJ,EAAP;AACH;;;;EAlHiC/C,kD;;AAAjBsB,uE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACbrB;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;AAGA,SAAS6C,oBAAT,CAA+BtS,MAA/B,EAAuC0E,aAAvC,EAAsDR,OAAtD,EAA+DpM,CAA/D,EAAkE;AAC9D,QAAMya,OAAO,EAAb;;AAD8D;AAAA;AAAA;;AAAA;AAG9D,6BAA2BvS,OAAOwS,OAAP,EAA3B,8HAA6C;AAAA;;AAAA;;AAAA,gBAAjCxc,GAAiC;AAAA,gBAA5BiK,KAA4B;;AACzCsS,iBAAKtS,MAAMpI,IAAN,EAAL,IAAqB,IAAI0N,8CAAJ,CAAUb,cAAc1O,GAAd,EAAmB8B,CAAnB,CAAV,EAAiCoM,QAAQlO,GAAR,EAAa8B,CAAb,CAAjC,EAAkDmI,KAAlD,CAArB;AACH;AAL6D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAM9D,WAAOsS,IAAP;AACH;;AAEM,SAASE,eAAT,CAA0BzS,MAA1B,EAAkC;AACrC,QAAMuS,OAAO,EAAb;;AAEA,SAAK,IAAMvc,GAAX,IAAkBgK,MAAlB,EAA0B;AACtBuS,aAAKvc,GAAL,IAAY,IAAIuP,8CAAJ,CAAUvF,OAAOhK,GAAP,EAAY0c,cAAtB,EAAsC1S,OAAOhK,GAAP,EAAY2c,QAAlD,EAA4D3c,GAA5D,CAAZ;AACH;AACD,WAAOuc,IAAP;AACH;;AAEM,IAAMK,eAAe,SAAfA,YAAe,QAA8B5N,iBAA9B,EAAiD6N,cAAjD,EAAoE;AAAA;AAAA,QAAlEjQ,UAAkE;AAAA,QAAtDkQ,aAAsD;;AAC5F,QAAIC,SAASD,cAAc9Z,MAAd,GAAuB8Z,cAAc/P,KAAd,CAAoB,GAApB,CAAvB,GAAkD,EAA/D;AACA,QAAIiQ,kBAAkBhO,kBAAkBK,SAAlB,EAAtB;AACA,QAAI4N,YAAYF,OAAOnb,GAAP,CAAW;AAAA,eAAQwV,iFAA0BA,CAAC4F,gBAAgBE,IAAhB,EAAsB9N,YAAjD,EAA+DxC,UAA/D,CAAR;AAAA,KAAX,CAAhB;AACA,WAAO2K,oDAAUA,CAACC,eAAX,CAA2ByF,SAA3B,EAAsCJ,cAAtC,CAAP;AACH,CALM;;AAOA,IAAMM,2BAA2B,SAA3BA,wBAA2B,CAAC/K,KAAD,EAAQgL,SAAR,EAA+C;AAAA,QAA5B7P,MAA4B,uEAAnB,EAAmB;AAAA,QAAf8P,UAAe;;AACnF,QAAID,cAAcrX,yDAAcA,CAACI,OAAjC,EAA0C;AAAA;;AACtCiM,cAAMkL,WAAN,CAAkBta,MAAlB,GAA2B,CAA3B;AACA,oCAAMsa,WAAN,EAAkB1a,IAAlB,8CAA0Bya,UAA1B;AACH,KAHD,MAGO;AACHjL,cAAMkL,WAAN,CAAkB1a,IAAlB,CAAuB;AACnB2a,gBAAIH,SADe;AAEnBI,kBAAMjQ,MAFa;AAGnBkQ,sBAAUJ;AAHS,SAAvB;AAKH;AACJ,CAXM;AAYA,IAAMK,4BAA4B,SAA5BA,yBAA4B,CAACC,QAAD,EAAWC,KAAX,EAAqB;AAAA;;AAC1D,mCAAMC,mBAAN,EAA0Bjb,IAA1B,iDAAkC+a,SAASE,mBAA3C,4BAAmEF,SAASL,WAA5E;AACH,CAFM;;
AAIA,IAAMzP,qBAAqB,SAArBA,kBAAqB,CAAC8P,QAAD,EAAWvL,KAAX,EAAkBgL,SAAlB,EAAyD;AAAA,QAA5B7P,MAA4B,uEAAnB,EAAmB;AAAA,QAAf8P,UAAe;;AACvFF,6BAAyB/K,KAAzB,EAAgCgL,SAAhC,EAA2C7P,MAA3C,EAAmD8P,UAAnD;AACAK,8BAA0BC,QAA1B,EAAoCvL,KAApC;AACH,CAHM;;AAKP,IAAM0L,sEACD/J,oDAAaA,CAACC,MADb,EACsB;AACpB+J,eAAW,CAAC,YAAD,CADS;AAEpBC,cAAU,CAAC,IAAD,EAAO,KAAP;AAFU,CADtB,mCAKDjK,oDAAaA,CAACkB,OALb,EAKuB;AACrB8I,eAAW,CAAC,kBAAD,CADU;AAErBC,cAAU,CAAC,KAAD,EAAQ,IAAR;AAFW,CALvB,mCASDjK,oDAAaA,CAACmB,GATb,EASmB;AACjB6I,eAAW,CAAC,YAAD,EAAe,kBAAf,CADM;AAEjBC,cAAU,CAAC,IAAD,EAAO,IAAP;AAFO,CATnB,kBAAN;;AAeA,IAAMC,qBAAqB,SAArBA,kBAAqB,CAACrR,UAAD,EAAa9K,CAAb,EAAgBoc,iBAAhB,EAAsC;AAC7D,QAAIA,sBAAsB,CAAC,CAAvB,IAA4Bpc,MAAOoc,oBAAoB,CAA3D,EAA+D;AAC3D,YAAMC,KAAKvR,WAAW5J,MAAX,GAAoB,CAA/B;;AAEA4J,mBAAWuR,EAAX,IAAoBvR,WAAWuR,EAAX,EAAepR,KAAf,CAAqB,GAArB,EAA0B,CAA1B,CAApB,SAAoDjL,CAApD;AACH,KAJD,MAIO;AACH8K,mBAAWhK,IAAX,MAAmBd,CAAnB;AACH;AACJ,CARD;;AAUO,IAAMsc,2BAA2B,SAA3BA,wBAA2B,CAACxR,UAAD,EAAayR,OAAb,EAAsBvK,IAAtB,EAA+B;AACnE,QAAIwK,uBAAuB,CAAC,CAA5B;AACA,QAAIC,uBAAuB,CAAC,CAA5B;AACA,QAAMC,gBAAgB,EAAtB;AACA,QAAMC,gBAAgB,EAAtB;;AAJmE,+CAM9BX,cAAchK,IAAd,EAAoBkK,QANU;AAAA,QAM5DU,YAN4D;AAAA,QAM9CC,YAN8C;;AAQnE1N,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM8c,gBAAgBP,QAAQvc,CAAR,CAAtB;AACA8c,yBAAiBF,YAAjB,IAAiCT,mBAAmBO,aAAnB,EAAkC1c,CAAlC,EAAqCwc,oBAArC,CAAjC;AACA,SAACM,aAAD,IAAkBD,YAAlB,IAAkCV,mBAAmBQ,aAAnB,EAAkC3c,CAAlC,EAAqCyc,oBAArC,CAAlC;AACH,KAJD;AAKA,WAAO;AACH3R,oBAAY4R,cAAcvc,IAAd,CAAmB,GAAnB,CADT;AAEH4c,0BAAkBJ,cAAcxc,IAAd,CAAmB,GAAnB;AAFf,KAAP;AAIH,CAjBM;;AAoBA,IAAM6c,0BAA0B,SAA1BA,uBAA0B,CAAClS,UAAD,EAAayR,OAAb,EAAsBvK,IAAtB,EAA4BH,YAA5B,EAA0CoL,aAA1C,EAA4D;AAC/F,QAAIb,oBAAoB,EAAxB;AACA,QAAMc,kBAAkB,EAAxB;AACA,QAAMC,eAAe,EAArB;;AAEAhO,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAIuc,QAAQvc,CAAR,CAAJ,EAAgB;AACZ,gBAAIuW,OAAO,EAAX;;AAEA,gBAAI6G,eAAe,EAAEvU,MAAM,EAAR,EAAnB;;AAEAgJ,yBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxB,oBAAMhF,OAAO2W,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAA
mCtG,CAAnC,CAAb;AACAuW,uBAAUA,IAAV,SAAkBjQ,IAAlB;AACA8W,6BAAavU,IAAb,CAAkByC,CAAlB,IAAuBhF,IAAvB;AACH,aAJD;;AAMA,gBAAI4W,gBAAgB3G,IAAhB,MAA0B1L,SAA9B,EAAyC;AACrCqS,gCAAgB3G,IAAhB,IAAwB,EAAxB;AACA6F,kCAAkB7F,IAAlB,IAA0B,CAAC,CAA3B;AACA4G,6BAAa5G,IAAb,IAAqB6G,YAArB;AACH;;AAEDjB,+BAAmBe,gBAAgB3G,IAAhB,CAAnB,EAA0CvW,CAA1C,EAA6Coc,kBAAkB7F,IAAlB,CAA7C;AACA6F,8BAAkB7F,IAAlB,IAA0BvW,CAA1B;AACH;AACJ,KArBD;;AAuBA,WAAO;AACHkd,wCADG;AAEHC;AAFG,KAAP;AAIH,CAhCM;;AAmCA,IAAME,eAAe,SAAfA,YAAe,CAACC,QAAD,EAAWC,QAAX,EAAqB9R,MAArB,EAA6BoQ,QAA7B,EAAuC2B,QAAvC,EAAoD;AAC5E,QAAIzO,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAM6M,SAAS5M,YAAT,EAAN;AAAA,KAApB;AAF4E,QAGpE+C,IAHoE,GAG3DvG,MAH2D,CAGpEuG,IAHoE;;AAI5E,QAAMlH,aAAawS,SAAS3T,WAA5B;AACA,QAAMyD,qBAAqBkQ,SAASnQ,kBAAT,CAA4BE,mBAAvD;;AAEA,QAAMoQ,mBAAmB,SAAnBA,gBAAmB;AAAA,eAASF,SAC9BnQ,mBAAmBuB,KAAnB,CAD8B,EAE9BA,KAF8B,EAG9BK,aAH8B,EAI9BD,WAJ8B,CAAT;AAAA,KAAzB;;AAOA,WAAOyO,SAAS1S,UAAT,EAAqB2S,gBAArB,EAAuCzL,IAAvC,CAAP;AACH,CAfM;;AAiBA,IAAM0L,qBAAqB,SAArBA,kBAAqB,CAACpN,KAAD,EAAW;AACzC,QAAMgN,WAAWhN,MAAM1B,KAAN,CAAY,KAAZ,CAAjB;AACA,QAAM1B,oBAAoBoD,MAAM/G,oBAAN,EAA1B;AACA+T,aAASzT,cAAT,GAA0BqD,kBAAkBhF,MAAlB,CAAyBpI,GAAzB,CAA6B;AAAA,eAAKO,EAAEN,IAAF,EAAL;AAAA,KAA7B,EAA4CI,IAA5C,CAAiD,GAAjD,CAA1B;;AAEA;AACA+M,sBAAkBW,gBAAlB,GAAqC,IAArC;AACAX,sBAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,sBAAkBa,cAAlB,GAAmC,IAAnC;AACAuP,aAAStP,qBAAT,GAAiCC,qBAAjC;;AAEA,WAAOqP,QAAP;AACH,CAZM;;AAcP,IAAMK,SAAS,SAATA,MAAS,CAACvW,GAAD,EAAMd,IAAN,EAAYwK,EAAZ,EAAmB;AAC9B,QAAI5S,MAAM4S,GAAG1J,GAAH,EAAQd,IAAR,EAAc,CAAd,CAAV;;AAEA,SAAK,IAAItG,IAAI,CAAR,EAAW4d,MAAMxW,IAAIlG,MAA1B,EAAkClB,IAAI4d,GAAtC,EAA2C5d,GAA3C,EAAgD;AAC5C9B,cAASA,GAAT,SAAgB4S,GAAG1J,GAAH,EAAQd,IAAR,EAActG,CAAd,CAAhB;AACH;AACD,WAAO9B,GAAP;AACH,CAPD;;AASA,IAAM2f,QAAQ,SAARA,KAAQ,CAACzW,GAAD,EAAMc,MAAN,EAAcoC,GAAd,EAAmBwT,KAAnB,EAA6B;AACvC,QAAMtF,MAAMtQ,OAAOd,IAAIkD,GAAJ,CAAP,EAAiByT,aAA7B;AACA,WAAO3W,IAAIkD,GAAJ,MAAatG,iDAAb,GAAsB8Z,KAAtB,GAA8BtF,GAArC;AACH,CAHD;;AAKA,IAAMwF,oCACDlK,qDAAcA,CAACC,UADd,EAC2B,UAA
CyE,GAAD,EAAM/B,MAAN,EAAiB;AAC1C,QAAMwH,YAAYxH,OAAO,CAAP,aAAqBtV,KAArB,GAA6BsV,MAA7B,GAAsC,CAACA,MAAD,CAAxD;AACA,WAAOwH,UAAUC,IAAV,CAAe;AAAA,eAAO1F,OAAO2F,IAAI,CAAJ,CAAP,IAAiB3F,OAAO2F,IAAI,CAAJ,CAA/B;AAAA,KAAf,CAAP;AACH,CAJC,CAAN;;AAOA,IAAMC,iBAAiB,SAAjBA,cAAiB,CAACjgB,KAAD,EAAQsY,MAAR,EAAgB4H,SAAhB;AAAA,WAA8BL,cAAcK,SAAd,EAAyBlgB,KAAzB,EAAgCsY,MAAhC,CAA9B;AAAA,CAAvB;;AAEO,IAAM6H,yBAAyB,SAAzBA,sBAAyB,CAAChO,KAAD,EAAQiO,UAAR,EAAoC;AAAA,QAAhB9S,MAAgB,uEAAP,EAAO;;AACtE,QAAI+S,MAAM,EAAV;AACA,QAAMlD,YAAY7P,OAAO6P,SAAP,IAAoBvW,4DAAiBA,CAACC,GAAxD;AACA,QAAMyZ,kBAAkBhT,OAAOgT,eAAP,IAA0B,KAAlD;AACA,QAAMC,cAAchB,mBAAmBpN,KAAnB,CAApB;AACA,QAAMqO,oBAAoBD,YAAYpQ,eAAZ,EAA1B;;AAEA,QAAI,CAACiQ,WAAWrd,MAAhB,EAAwB;AACpBsd,cAAM,CAAC;AAAA,mBAAM,KAAN;AAAA,SAAD,CAAN;AACH,KAFD,MAEO;AACHA,cAAMD,WAAWze,GAAX,CAAe;AAAA,mBAAc,YAAmB;AAAA,oBAAlB6b,QAAkB,uEAAP,EAAO;AAAA,4CACRA,QADQ,CAC1CpM,WAD0C;AAAA,oBAC1CA,WAD0C,yCAC5B,CAAC,EAAD,EAAK,EAAL,CAD4B;AAAA,oBAClBqP,KADkB,GACRjD,QADQ,CAClBiD,KADkB;;AAAA,kDAEXrP,WAFW;AAAA;AAAA,oBAE3CtF,UAF2C,iCAE9B,EAF8B;AAAA;AAAA,oBAE1B4U,MAF0B,kCAEjB,EAFiB;;AAGlD,oBAAMC,OAAO7U,WAAW/I,MAAxB;AACA,oBAAM6d,YAAY,EAAlB;;AAEA,oBAAID,IAAJ,EAAU;AACN,yBAAK,IAAI9e,IAAI,CAAR,EAAW4d,MAAMrO,YAAYrO,MAAlC,EAA0ClB,IAAI4d,GAA9C,EAAmD5d,GAAnD,EAAwD;AACpD,4BAAMM,MAAMiP,YAAYvP,CAAZ,CAAZ;AACA,4BAAM9B,MAAMoC,IAAIH,IAAJ,EAAZ;AACA4e,kCAAU7gB,GAAV,IAAiB,CAAjB;AACH;AACJ;AACD,oBAAM8gB,YAAYte,OAAOmI,IAAP,CAAY+V,SAAS,EAArB,CAAlB;AACA,uBAAOC,OAAO3d,MAAP,IAAiB8d,UAAU9d,MAA3B,GAAoC,UAACgH,MAAD,EAASlI,CAAT,EAAe;AACtD,wBAAMif,UAAUH,OAAOC,UAAUpB,OAAO1T,UAAP,EAAmB/B,MAAnB,EAA2B2V,KAA3B,EAAkC7d,CAAlC,CAAV,CAAP,GAAyD,IAAzE;;AAEA,wBAAIye,eAAJ,EAAqB;AACjB,+BAAOO,UAAUE,KAAV,CAAgB,UAAC/W,KAAD,EAAW;AAC9B,gCAAMqQ,MAAMtQ,OAAOC,KAAP,EAAc4V,aAA1B;AACA,mCAAOK,eAAe5F,GAAf,EAAoBoG,MAAMzW,KAAN,CAApB,EAAkCwW,kBAAkBxW,KAAlB,EAAyBgX,GAAzB,CAA6B3N,OAA/D,CAAP;AACH,yBAHM,KAGDyN,OAHN;AAIH;AACD,2BAAOA,OAAP;AACH,iBAVM,GAUH;AAAA,2BAAM,KAAN;AAAA,iBAVJ;AAWH,aAzBiC,CAyB/BrO,SAzB+B,CAAb;AAAA,SAAf,CAAN;AA0BH;;AAED,QAAIwO,sBAAJ;AACA,
QAAI9D,cAAcvW,4DAAiBA,CAACC,GAApC,EAAyC;AACrCoa,wBAAgBV,YAAYvK,MAAZ,CAAmB;AAAA,mBAAUqK,IAAIU,KAAJ,CAAU;AAAA,uBAAMpO,GAAG5I,MAAH,CAAN;AAAA,aAAV,CAAV;AAAA,SAAnB,EAA0D;AACtEwD,uBAAW;AAD2D,SAA1D,CAAhB;AAGH,KAJD,MAIO;AACH0T,wBAAgBV,YAAYvK,MAAZ,CAAmB;AAAA,mBAAUqK,IAAIN,IAAJ,CAAS;AAAA,uBAAMpN,GAAG5I,MAAH,CAAN;AAAA,aAAT,CAAV;AAAA,SAAnB,EAAyD;AACrEwD,uBAAW;AAD0D,SAAzD,CAAhB;AAGH;;AAED,WAAO0T,aAAP;AACH,CAlDM;;AAqDA,IAAMjN,kBAAkB,SAAlBA,eAAkB,CAAC0J,QAAD,EAAWhK,YAAX,EAA4D;AAAA,QAAnCC,SAAmC,uEAAvB;AAAA,eAAO0G,GAAP;AAAA,KAAuB;AAAA,QAAX/M,MAAW;AAAA,QAEnFC,SAFmF,GAGnFD,MAHmF,CAEnFC,SAFmF;;AAIvF,QAAMuR,gBAAgBpB,SAASnP,aAAT,GAAyBa,SAAzB,EAAtB;;AAJuF,wBASnF8P,aACAxB,SAASjN,KAAT,CAAelD,SAAf,CADA,EAEAoG,SAFA,EAGArG,MAHA,EAIAoQ,QAJA,EAKA;AAAA,0CAAIjQ,MAAJ;AAAIA,kBAAJ;AAAA;;AAAA,eAAeoR,yCAA2BpR,MAA3B,SAAmCiG,YAAnC,EAAiDoL,aAAjD,GAAf;AAAA,KALA,CATmF;AAAA,QAOnFC,eAPmF,iBAOnFA,eAPmF;AAAA,QAQnFC,YARmF,iBAQnFA,YARmF;;AAiBvF,QAAMkC,YAAY,EAAlB;AACA3e,WAAOmI,IAAP,CAAYqU,eAAZ,EAA6B5T,IAA7B,GAAoC1I,OAApC,CAA4C,UAACsJ,CAAD,EAAO;AAC/C,YAAIgT,gBAAgBhT,CAAhB,CAAJ,EAAwB;AACpB,gBAAMoV,SAASzD,SAASjN,KAAT,CAAelD,SAAf,CAAf;AACA,gBAAM6T,aAAapC,aAAajT,CAAb,CAAnB;AACAoV,mBAAO3V,WAAP,GAAqBuT,gBAAgBhT,CAAhB,EAAmB/J,IAAnB,CAAwB,GAAxB,CAArB;AACAmf,mBAAOtR,qBAAP,GAA+BC,qBAA/B;;AAEA,gBAAMuR,oBAAoB,SAApBA,iBAAoB;AAAA,uBAAU3N,aAAaqN,KAAb,CAAmB;AAAA,2BAAKhX,OAAOoD,CAAP,EAAUyS,aAAV,KAA4BwB,WAAW1W,IAAX,CAAgByC,CAAhB,CAAjC;AAAA,iBAAnB,CAAV;AAAA,aAA1B;AACA;AACA,gBAAII,SAAJ,EAAe;AACXK,mCAAmB8P,QAAnB,EAA6ByD,MAA7B,EAAqCrb,yDAAcA,CAACC,MAApD,EAA4DuH,MAA5D,EAAoE+T,iBAApE;AACH;AACDF,mBAAO9D,WAAP,CAAmB8D,OAAO9D,WAAP,CAAmBta,MAAnB,GAA4B,CAA/C,EAAkDwa,IAAlD,GAAyDyB,aAAajT,CAAb,CAAzD;;AAEAmV,sBAAUve,IAAV,CAAewe,MAAf;AACH;AACJ,KAhBD;;AAmBA,WAAOD,SAAP;AACH,CAtCM;AAuCA,IAAMI,uBAAuB,SAAvBA,oBAAuB,CAACnC,QAAD,EAAWxS,UAAX,EAAuB+Q,QAAvB,EAAiC6D,YAAjC,EAA+CnC,QAA/C,EAA4D;AAC5FD,aAAS3T,WAAT,GAAuBmB,UAAvB;AACAwS,aAAStP,qBAAT,GAAiCC,qBAAjC;AACAlC,uBACI8P,QADJ,EAEIyB,QAFJ,EAGIrZ,yDAAcA,CAACC,MAHnB,EAIK,EAAEuH,QAAQiU,YAAV,EAJL,EAKMnC,QALN;AAOH
,CAVM;;AAaA,IAAMoC,kBAAkB,SAAlBA,eAAkB,CAAC9D,QAAD,EAAW0B,QAAX,EAAqBmC,YAArB,EAAmCE,WAAnC,EAAmD;AAC9E,QAAIC,eAAe,EAAnB;;AAD8E,QAGxE7N,IAHwE,GAG/D0N,YAH+D,CAGxE1N,IAHwE;;;AAK9E,QAAMsN,SAASzD,SAASjN,KAAT,CAAegR,YAAYlU,SAA3B,CAAf;AACA,QAAMoU,mBAAmBzC,aACrBiC,MADqB,EAErB/B,QAFqB,EAGrBmC,YAHqB,EAIrB7D,QAJqB,EAKrBS,wBALqB,CAAzB;AAOA,QAAML,YAAYD,cAAchK,IAAd,EAAoBiK,SAAtC;;AAEAwD,yBAAqBH,MAArB,EAA6BQ,iBAAiB7D,UAAU,CAAV,CAAjB,CAA7B,EAA6DJ,QAA7D,EAAuE6D,YAAvE,EAAqFnC,QAArF;;AAEA,QAAItB,UAAU/a,MAAV,GAAmB,CAAvB,EAA0B;AACtB2e,uBAAehE,SAASjN,KAAT,CAAegR,YAAYlU,SAA3B,CAAf;AACA+T,6BAAqBI,YAArB,EAAmCC,iBAAiB7D,UAAU,CAAV,CAAjB,CAAnC,EAAmEJ,QAAnE,EAA6E6D,YAA7E,EAA2FnC,QAA3F;AACA,eAAO,CAAC+B,MAAD,EAASO,YAAT,CAAP;AACH;;AAED,WAAOP,MAAP;AACH,CAxBM;;AA0BA,IAAMS,mBAAmB,SAAnBA,gBAAmB,CAAClE,QAAD,EAAWmE,SAAX,EAAsBvU,MAAtB,EAA8B8G,SAA9B,EAA4C;AACxE,QAAM+M,SAASzD,SAASjN,KAAT,CAAenD,OAAOC,SAAtB,CAAf;AACA,QAAIuU,gBAAgBD,SAApB;AACA,QAAIvU,OAAOuG,IAAP,KAAgBC,oDAAaA,CAACkB,OAAlC,EAA2C;AACvC8M,wBAAgB1N,UAAUqH,MAAV,CAAiB;AAAA,mBAAaoG,UAAUzV,OAAV,CAAkB0C,SAAlB,MAAiC,CAAC,CAA/C;AAAA,SAAjB,CAAhB;AACH;AACD;AACA;AACAqS,WAAOzV,cAAP,GAAwBoW,cAAc9f,IAAd,CAAmB,GAAnB,CAAxB;AACAmf,WAAOtR,qBAAP,GAA+BC,qBAA/B;;AAEAlC,uBACI8P,QADJ,EAEIyD,MAFJ,EAGIrb,yDAAcA,CAACE,OAHnB,EAII,EAAE6b,oBAAF,EAAavU,cAAb,EAAqByU,iBAAiBD,aAAtC,EAJJ,EAKI,IALJ;;AAQA,WAAOX,MAAP;AACH,CApBM;;AAuBA,IAAM3M,mBAAmB,SAAnBA,gBAAmB,CAACkJ,QAAD,EAAWsE,YAAX,EAAyB1U,MAAzB,EAAiC8G,SAAjC;AAAA,WAC5B4N,aAAargB,GAAb,CAAiB;AAAA,eACbigB,iBAAiBlE,QAAjB,EAA2BuE,UAA3B,EAAuC3U,MAAvC,EAA+C8G,SAA/C,CADa;AAAA,KAAjB,CAD4B;AAAA,CAAzB;;AAIA,IAAMpE,qBAAqB,SAArBA,kBAAqB,CAAC1G,UAAD,EAAgB;AAC9C;AACAA,iBAAa4Y,sDAAOA,CAAC,EAAR,EAAY5Y,UAAZ,CAAb;AACA,QAAI,CAACA,WAAW5B,IAAhB,EAAsB;AAClB4B,mBAAW5B,IAAX,GAAkByL,gDAASA,CAACC,SAA5B;AACH;;AAED,QAAI,CAAC9J,WAAW+J,OAAhB,EAAyB;AACrB,gBAAQ/J,WAAW5B,IAAnB;AACA,iBAAKyL,gDAASA,CAAC4B,OAAf;AACIzL,2BAAW+J,OAAX,GAAqBsC,qDAAcA,CAACC,UAApC;AACA;AACJ;AACA,iBAAKzC,gDAASA,CAACC,SAAf;AACI9J,2BAAW+J,OAAX,GAAqBC,uDAAgBA,CAACuB,WAAtC;AACA;AAPJ;AASH;;AA
ED,WAAOvL,UAAP;AACH,CApBM;;AAsBA,IAAM6Y,qBAAqB,SAArBA,kBAAqB,CAAC7Y,UAAD,EAAgB;AAAA,QACtC5B,IADsC,GACd4B,UADc,CACtC5B,IADsC;AAAA,QAChC2L,OADgC,GACd/J,UADc,CAChC+J,OADgC;AAAA,QACvBzR,IADuB,GACd0H,UADc,CACvB1H,IADuB;;AAE9C,QAAI8F,SAASyL,gDAASA,CAACC,SAAnB,IAAgC1L,SAASyL,gDAASA,CAAC4B,OAAvD,EAAgE;AAC5D,YAAI,CAACJ,qDAAaA,CAAC9M,GAAd,CAAkBwL,OAAlB,CAAL,EAAiC;AAC7B,kBAAM,IAAIxK,KAAJ,uDAA6DwK,OAA7D,kBAAiFzR,IAAjF,YAAN;AACH;AACJ,KAJD,MAIO;AACH,cAAM,IAAIiH,KAAJ,4CAAkDnB,IAAlD,kBAAmE9F,IAAnE,YAAN;AACH;AACJ,CATM;;AAWA,IAAMwgB,4BAA4B,SAA5BA,yBAA4B;AAAA,WAAUha,OAAOzG,GAAP,CAAW,UAAC2H,UAAD,EAAgB;AAC1EA,qBAAa0G,mBAAmB1G,UAAnB,CAAb;AACA6Y,2BAAmB7Y,UAAnB;AACA,eAAOA,UAAP;AACH,KAJkD,CAAV;AAAA,CAAlC;;AAMA,IAAM+Y,mBAAmB,SAAnBA,gBAAmB,CAACja,MAAD,EAASka,UAAT,EAAwB;AACpDla,WAAO3F,OAAP,CAAe,UAAC6G,UAAD,EAAgB;AAC3B,YAAMiZ,cAAcjZ,WAAWkZ,EAA/B;AACA,YAAI,CAACD,WAAL,EAAkB;AAAE;AAAS;;AAE7B,YAAMpW,MAAMmW,WAAWlW,OAAX,CAAmB9C,WAAW1H,IAA9B,CAAZ;AACA0gB,mBAAWnW,GAAX,IAAkBoW,WAAlB;AACAjZ,mBAAW1H,IAAX,GAAkB2gB,WAAlB;AACA,eAAOjZ,WAAWkZ,EAAlB;AACH,KARD;AASH,CAVM;;AAYA,IAAMC,aAAa,SAAbA,UAAa,CAACC,QAAD,EAAWva,IAAX,EAAiBC,MAAjB,EAAyBC,OAAzB,EAAqC;AAC3DD,aAASga,0BAA0Bha,MAA1B,CAAT;AACAC,cAAU9F,OAAOgH,MAAP,CAAchH,OAAOgH,MAAP,CAAc,EAAd,EAAkBoZ,uDAAlB,CAAd,EAAgDta,OAAhD,CAAV;AACA,QAAMZ,YAAYM,yDAAcA,CAACD,GAAf,CAAmBO,QAAQU,UAA3B,CAAlB;;AAGA,QAAI,CAACtB,SAAL,EAAgB;AACZ,cAAM,IAAIoB,KAAJ,sCAA6CR,QAAQU,UAArD,aAAN;AACH;;AAR0D,6BAU3BtB,UAAUnD,OAAV,CAAkB6D,IAAlB,EAAwBC,MAAxB,EAAgCC,OAAhC,CAV2B;AAAA;AAAA,QAUpDiC,MAVoD;AAAA,QAU5CmE,aAV4C;;AAW3D4T,qBAAiBja,MAAjB,EAAyBkC,MAAzB;AACA,QAAMkN,WAAWtG,mEAAYA,CAACzC,aAAb,EAA4BrG,MAA5B,EAAoCkC,MAApC,CAAjB;;AAEA;AACA,QAAMsY,YAAYtL,oDAAUA,CAACC,eAAX,CAA2BC,QAA3B,EAAqCnP,QAAQzG,IAA7C,CAAlB;AACA8gB,aAAS1T,kBAAT,GAA8B4T,SAA9B;;AAEA;AACAF,aAASlX,WAAT,GAAuBiD,cAAc1L,MAAd,IAAwB0L,cAAc,CAAd,EAAiB1L,MAAzC,WAAuD0L,cAAc,CAAd,EAAiB1L,MAAjB,GAA0B,CAAjF,IAAuF,EAA9G;;AAEA;AACA,QAAM8f,eAAe,EAArB;AAtB2D,QAuBnD9Y,MAvBmD,GAuBxC6Y,SAvBwC,CAuBnD7Y,MAvBmD;;AAwB3D,QAAM+Y,gBAAgB/Y,OAAOpI,GAAP,CAAW;AAAA,eA
ASqI,MAAM7B,IAAN,EAAT;AAAA,KAAX,CAAtB;AACA,QAAM4a,sBAAsBhZ,OAAOpI,GAAP,CAAW;AAAA,eAASqI,MAAMyE,aAAN,EAAT;AAAA,KAAX,CAA5B;AACAuC,wEAAkBA,CAAC0R,SAASlX,WAA5B,EAAyC,UAAC3J,CAAD,EAAO;AAC5CghB,qBAAahhB,CAAb,IAAkBwa,qBAAqBtS,MAArB,EAA6BgZ,mBAA7B,EAAkDD,aAAlD,EAAiEjhB,CAAjE,CAAlB;AACH,KAFD;AAGA+gB,cAAU1T,mBAAV,GAAgC2T,YAAhC;;AAEAH,aAAShX,cAAT,GAA2BtD,OAAOzG,GAAP,CAAW;AAAA,eAAKwL,EAAEvL,IAAP;AAAA,KAAX,CAAD,CAA0BI,IAA1B,EAA1B;AACA0gB,aAASpU,WAAT,GAAuBjG,QAAQU,UAAR,KAAuBd,iDAAUA,CAACC,IAAlC,GAAyCc,+DAAgBA,CAACb,IAAjB,CAAzC,GAAkEE,QAAQU,UAAjG;AACA,WAAO2Z,QAAP;AACH,CAlCM;;AAoCA,IAAMM,gBAAgB,SAAhBA,aAAgB,CAAC5a,MAAD,EAAS4B,KAAT,EAAmB;AAC5C,QAAInI,IAAI,CAAR;;AAEA,WAAOA,IAAIuG,OAAOrF,MAAlB,EAA0B,EAAElB,CAA5B,EAA+B;AAC3B,YAAImI,UAAU5B,OAAOvG,CAAP,EAAUD,IAAxB,EAA8B;AAC1B,mBAAO;AACHA,sBAAMoI,KADH;AAEHtC,sBAAMU,OAAOvG,CAAP,EAAUwR,OAAV,IAAqBjL,OAAOvG,CAAP,EAAU6F,IAFlC;AAGH8I,uBAAO3O;AAHJ,aAAP;AAKH;AACJ;AACD,WAAO,IAAP;AACH,CAbM;;AAeA,IAAMohB,yBAAyB,SAAzBA,sBAAyB,CAAC7B,UAAD,EAAgB;AAClD,QAAI3T,SAAS,EAAb;AACA,QAAI0P,kBAAJ;AACAA,gBAAYiE,WAAW9D,EAAvB;AACA,YAAQH,SAAR;AACA,aAAKrX,yDAAcA,CAACC,MAApB;AACI0H,qBAAS,CAAC2T,WAAW5D,QAAZ,CAAT;AACA;AACJ,aAAK1X,yDAAcA,CAACE,OAApB;AACIyH,qBAAS,CAAC2T,WAAW7D,IAAX,CAAgBwE,eAAjB,CAAT;AACA;AACJ,aAAKjc,yDAAcA,CAACO,IAApB;AACIoH,qBAAS,CAAC2T,WAAW5D,QAAZ,CAAT;AACA;AACJ,aAAK1X,yDAAcA,CAACG,OAApB;AACIkX,wBAAY,SAAZ;AACA1P,qBAAS,CAAC2T,WAAW7D,IAAX,CAAgB/P,aAAhB,CAA8BV,KAA9B,CAAoC,GAApC,CAAD,EAA2CsU,WAAW5D,QAAtD,CAAT;AACA;AACJ;AACIL,wBAAY,IAAZ;AAfJ;;AAkBA,WAAO;AACHA,4BADG;AAEH1P;AAFG,KAAP;AAIH,CA1BM;;AA4BP,IAAMyV,gCAAgC,SAAhCA,6BAAgC,CAACzQ,SAAD,EAAY0Q,SAAZ,EAA0B;AAC5D,QAAMC,cAAcD,UAAUE,cAAV,EAApB;AACA,QAAIC,iBAAiB7Q,SAArB;;AAEA2Q,gBAAY3gB,OAAZ,CAAoB,UAAC2e,UAAD,EAAgB;AAChC,YAAI,CAACA,UAAL,EAAiB;AACb;AACH;;AAH+B,oCAKF6B,uBAAuB7B,UAAvB,CALE;AAAA,YAKxBjE,SALwB,yBAKxBA,SALwB;AAAA,YAKb1P,MALa,yBAKbA,MALa;;AAMhC,YAAI0P,SAAJ,EAAe;AAAA;;AACXmG,6BAAiB,mCAAenG,SAAf,4CAA6B1P,MAA7B,UAAqC;AAClDF,2BAAW;AADuC,aAArC,GAAjB;AAGH;AACJ,KAXD;;AAaA,WAAO+V,cAAP;AACH,CAlBD;;AAoBA,IAAMC,m
BAAmB,SAAnBA,gBAAmB,CAAC9Q,SAAD,EAAY+Q,IAAZ,EAAqB;AAC1C,SAAK,IAAI3hB,IAAI,CAAR,EAAW4d,MAAM+D,KAAKzgB,MAA3B,EAAmClB,IAAI4d,GAAvC,EAA4C5d,GAA5C,EAAiD;AAC7C,YAAMsQ,QAAQqR,KAAK3hB,CAAL,CAAd;AACA4Q,oBAAYyQ,8BAA8BzQ,SAA9B,EAAyCN,KAAzC,CAAZ;AACH;AACD,WAAOM,SAAP;AACH,CAND;;AAQA,IAAMgR,uBAAuB,SAAvBA,oBAAuB,CAACN,SAAD,EAAY1Q,SAAZ,EAA0D;AAAA,QAAnCnF,MAAmC,uEAA1B,EAA0B;AAAA,QAAtBoW,YAAsB,uEAAP,EAAO;;AACnF,QAAMC,qBAAqBD,aAAaC,kBAAxC;AACA,QAAMC,gBAAgBF,aAAaE,aAAb,IAA8B,EAApD;;AAEA,QAAIT,cAAcQ,kBAAlB,EAAsC;AAClC;AACH;;AAED,QAAME,YAAYD,cAAc7gB,MAAd,GAAuB6gB,cAAcxX,OAAd,CAAsB+W,SAAtB,MAAqC,CAAC,CAA7D,GAAiE,IAAnF;;AAEAU,iBAAaV,UAAUW,iBAAV,CAA4BrR,SAA5B,EAAuCnF,MAAvC,CAAb;;AAEA,QAAMyW,WAAWZ,UAAUa,SAA3B;AACAD,aAASthB,OAAT,CAAiB,UAACwhB,KAAD,EAAW;AACxB,YAAMX,iBAAiBJ,8BAA8BzQ,SAA9B,EAAyCwR,KAAzC,CAAvB;AACAR,6BAAqBQ,KAArB,EAA4BX,cAA5B,EAA4ChW,MAA5C,EAAoDoW,YAApD;AACH,KAHD;AAIH,CAjBD;;AAmBO,IAAM1R,sBAAsB,SAAtBA,mBAAsB,CAACG,KAAD,EAAW;AAC1C,WAAOA,MAAM+R,OAAN,IAAiB/R,MAAMkL,WAAN,CAAkB8G,IAAlB,CAAuB;AAAA,eAAK1Y,EAAE6R,EAAF,KAASxX,yDAAcA,CAACG,OAA7B;AAAA,KAAvB,CAAxB,EAAsF;AAClFkM,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAO/R,KAAP;AACH,CALM;;AAOA,IAAMP,mBAAmB,SAAnBA,gBAAmB,CAACO,KAAD,EAAW;AACvC,WAAOA,MAAM+R,OAAb,EAAsB;AAClB/R,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAO/R,KAAP;AACH,CALM;;AAOA,IAAMiS,qBAAqB,SAArBA,kBAAqB,CAACjS,KAAD,EAAsB;AAAA,QAAdqR,IAAc,uEAAP,EAAO;;AACpD,WAAOrR,MAAM+R,OAAb,EAAsB;AAClBV,aAAK7gB,IAAL,CAAUwP,KAAV;AACAA,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAOV,IAAP;AACH,CANM;;AAQA,IAAMnR,2BAA2B,SAA3BA,wBAA2B,CAACjB,WAAD,EAAca,UAAd,EAA0BoS,cAA1B,EAA0C/W,MAA1C,EAAqD;AACzF,QAAIkQ,iBAAJ;AACA,QAAI/K,kBAAJ;AAFyF,QAGjFZ,oBAHiF,GAGrCwS,cAHqC,CAGjFxS,oBAHiF;AAAA,QAG3DyS,iBAH2D,GAGrCD,cAHqC,CAG3DC,iBAH2D;;AAIzF,QAAM9S,sBAAsB6S,eAAe5S,QAA3C;AACA,QAAM8S,8BAA8BjX,OAAOiX,2BAA3C;AACA,QAAMC,WAAW,SAAXA,QAAW,CAACC,KAAD,EAAW;AACxB,YAAMhJ,SAASnO,OAAOkX,QAAP,IAAoB;AAAA,mBAAM,IAAN;AAAA,SAAnC;AACA,eAAO/I,OAAOgJ,KAAP,EAAcnX,MAAd,CAAP;AACH,KAHD;;AAKA,QAAIoX,YAAY,EAAhB;;AAEA,QAAItT,gBAAgB,IAAhB,IAAwB9D,OAAOqX,UAAP,KAAsB,IAAlD,EAAwD;
AACpDD,oBAAY,CAAC;AACTlH,sBAAU;AADD,SAAD,CAAZ;AAGAA,mBAAW,EAAX;AACH,KALD,MAKO;AAAA;;AACH,YAAIoH,kBAAkBriB,OAAOme,MAAP,CAAc7O,qBAAqBgT,cAAnC,CAAtB;AACA,YAAIP,sBAAsB,KAA1B,EAAiC;AAC7BM,8BAAkBA,gBAAgBnJ,MAAhB,CAAuB;AAAA,uBAAKhQ,EAAE6B,MAAF,CAASmE,QAAT,KAAsBD,mBAA3B;AAAA,aAAvB,CAAlB;AACH;;AAED,YAAMsT,mBAAmBF,gBAAgBnJ,MAAhB,CAAuB+I,QAAvB,EAAiC7iB,GAAjC,CAAqC;AAAA,mBAAUojB,OAAOzX,MAAP,CAAckQ,QAAxB;AAAA,SAArC,CAAzB;;AAEA,YAAMoG,gBAAgB,EAAtB;;AAEA,YAAIU,sBAAsB,KAA1B,EAAiC;AAC7B,gBAAMU,wBAAwBziB,OAAOme,MAAP,CAAc7O,qBAAqBgT,cAAnC,CAA9B;;AAEAG,kCAAsBviB,OAAtB,CAA8B,UAACwiB,SAAD,EAAe;AACzC,oBAAMC,aAAaD,UAAU3X,MAA7B;AACA,oBAAI4X,WAAWC,aAAX,KAA6B,KAA7B,IAAsCD,WAAWH,MAAX,KAAsBzX,OAAOyX,MAAnE,IACIG,WAAWzT,QAAX,KAAwBD,mBADhC,EACqD;AACjDoS,kCAAcjhB,IAAd,CAAmBsiB,UAAU9S,KAA7B;AACAqL,+BAAWwH,sBAAsBvJ,MAAtB,CAA6B;AAAA,+BAAKhQ,MAAMwZ,SAAX;AAAA,qBAA7B,EAAmDtjB,GAAnD,CAAuD;AAAA,+BAAK8J,EAAE6B,MAAF,CAASkQ,QAAd;AAAA,qBAAvD,CAAX;AACAA,6BAASza,MAAT,IAAmB2hB,UAAU/hB,IAAV,CAAe;AAC9B6a,0CAD8B;AAE9B4H,gCAAQH,UAAU9S,KAFY;AAG9BqR,8BAAMY,mBAAmBa,UAAU9S,KAA7B;AAHwB,qBAAf,CAAnB;AAKH;AACJ,aAZD;AAaH;;AAGDqL,mBAAW,aAAGpY,MAAH,2CAAiB0f,gBAAjB,IAAmC1T,WAAnC,IAAiDqK,MAAjD,CAAwD;AAAA,mBAAKhQ,MAAM,IAAX;AAAA,SAAxD,CAAX;AACAiZ,kBAAU/hB,IAAV,CAAe;AACX6a,8BADW;AAEXoG,qCAAmBA,aAAnB,qBAAqCtW,OAAOsW,aAAP,IAAwB,EAA7D;AAFW,SAAf;AAIH;;AAED,QAAMjS,YAAYM,WAAWE,KAA7B;;AAEA,QAAMb,aAAa/O,OAAOgH,MAAP,CAAc;AAC7B8b,2BAAmBjU,WADU;AAE7BI;AAF6B,KAAd,EAGhBlE,MAHgB,CAAnB;;AAKA,QAAMyE,mBAAmBE,WAAWC,YAApC;AACA,QAAIqS,+BAA+BxS,gBAAnC,EAAqD;AACjDU,oBAAY0N,uBAAuBpO,gBAAvB,EAAyCyL,QAAzC,EAAmD;AAC3D8C,6BAAiBiE;AAD0C,SAAnD,CAAZ;AAGAd,6BAAqB1R,gBAArB,EAAuCU,SAAvC,EAAkDnB,UAAlD;AACH;;AAEDoT,cAAUjiB,OAAV,CAAkB,UAAC6iB,GAAD,EAAS;AACvB,YAAMC,mBAAmBpF,uBAAuBxO,SAAvB,EAAkC2T,IAAI9H,QAAtC,CAAzB;AACA,YAAMgG,OAAO8B,IAAI9B,IAAjB;;AAEA,YAAIA,IAAJ,EAAU;AACN,gBAAMvC,gBAAgBsC,iBAAiBgC,gBAAjB,EAAmC/B,KAAKgC,OAAL,EAAnC,CAAtB;AACAF,gBAAIF,MAAJ,CAAWtB,iBAAX,CAA6B7C,aAA7B,EAA4C3P,UAA5C;AACH,SAHD,MAGO;AACHmS,iCAAqB9R,SAArB,EAAgC4T,gBAAhC,EAAkDjU,UAAlD,EAA8D;AAC
1DsS,+BAAe0B,IAAI1B,aADuC;AAE1DD,oCAAoBY,+BAA+BxS;AAFO,aAA9D;AAIH;AACJ,KAbD;AAcH,CAnFM;;AAqFA,IAAMO,4BAA4B,SAA5BA,yBAA4B,CAACT,oBAAD,EAAuBI,UAAvB,EAAmCoS,cAAnC,EAAsD;AAC3F,QAAMoB,mBAAmB5T,qBAAqB4T,gBAA9C;;AAEA,SAAK,IAAMV,MAAX,IAAqBU,gBAArB,EAAuC;AACnC,YAAMR,YAAYQ,iBAAiBV,MAAjB,CAAlB;AACA,YAAMG,aAAaD,UAAU3X,MAA7B;AACA,YAAMkE,sBAAsB6S,eAAe/W,MAAf,CAAsBmE,QAAlD;AACA,YAAMiU,wBAAwBrB,eAAe/S,UAAf,CAA0BoU,qBAA1B,GAC1BrB,eAAe/S,UAAf,CAA0BoU,qBAA1B,CAAgDR,UAAhD,EAA4Db,eAAe/W,MAA3E,CAD0B,GAC2D,IADzF;AAEA,YAAI4X,WAAWzT,QAAX,KAAwBD,mBAAxB,IAA+CkU,qBAAnD,EAA0E;AACtE,gBAAMC,gBAAgBT,WAAW1H,QAAjC;AACAnL,qCAAyBsT,aAAzB,EAAwC1T,UAAxC,EAAoD;AAChDJ,0DADgD;AAEhDyS,mCAAmB,KAF6B;AAGhD7S,0BAAUD;AAHsC,aAApD,EAIG0T,UAJH;AAKH;AACJ;AACJ,CAlBM;;AAoBA,IAAM9S,qBAAqB,SAArBA,kBAAqB,CAACP,oBAAD,EAA8C;AAAA,QAAvBvE,MAAuB,uEAAd,EAAc;AAAA,QAAV6E,KAAU;;AAC5E,QAAIyT,wBAAJ;AACA,QAAMrU,kBAAkBjE,OAAOiE,eAA/B;AACA,QAAMiM,WAAWlQ,OAAOkQ,QAAxB;AACA,QAAMzd,MAASuN,OAAOyX,MAAhB,SAA0BzX,OAAOmE,QAAvC;;AAEA,QAAIF,eAAJ,EAAqB;AACjBqU,0BAAkB/T,qBAAqBgT,cAAvC;AACH,KAFD,MAEO;AACHe,0BAAkB/T,qBAAqB4T,gBAAvC;AACH;;AAED,QAAIjI,aAAa,IAAjB,EAAuB;AACnB,eAAOoI,gBAAgB7lB,GAAhB,CAAP;AACH,KAFD,MAEO;AACH6lB,wBAAgB7lB,GAAhB,IAAuB;AACnBoS,wBADmB;AAEnB7E;AAFmB,SAAvB;AAIH;;AAED,WAAO,KAAP;AACH,CAtBM;;AAyBA,IAAMiH,yBAAyB,SAAzBA,sBAAyB,CAACsN,SAAD,EAAYzN,SAAZ,EAAuBD,WAAvB,EAAuC;AACzE,QAAM0R,sBAAsBhE,UAAUjY,MAAV,CAAiB,UAACC,GAAD,EAAMG,KAAN,EAAgB;AACzD,YAAIA,MAAMqE,WAAN,CAAkBzM,IAAlB,KAA2B,QAA/B,EAAyC;AACrCiI,gBAAIlH,IAAJ,+BAAYyR,UAAUqH,MAAV,CAAiB;AAAA,uBAAa3M,UAAUgX,MAAV,CAAiB9b,KAAjB,MAA4B,CAAC,CAA1C;AAAA,aAAjB,CAAZ;AACH,SAFD,MAEO,IAAIA,SAASmK,WAAb,EAA0B;AAC7BtK,gBAAIlH,IAAJ,CAASqH,KAAT;AACH;AACD,eAAOH,GAAP;AACH,KAP2B,EAOzB,EAPyB,CAA5B;AAQA,WAAO7G,MAAM+iB,IAAN,CAAW,IAAI1N,GAAJ,CAAQwN,mBAAR,CAAX,EAAyClkB,GAAzC,CAA6C;AAAA,eAASqI,MAAM/J,IAAN,EAAT;AAAA,KAA7C,CAAP;AACH,CAVM;;AAYP;;;;;;;AAOO,IAAM+lB,wBAAwB,SAAxBA,qBAAwB,CAAChc,KAAD,EAAQhK,KAAR,EAAkB;AACnD,QAAIgK,MAAMmQ,YAAV,EAAwB;AACpB,eAAOnQ,MAAMmQ,YAAN,GAAqBna,KAArB,CAAP;AACH;AACD,WAAOA,KAAP;AAC
H,CALM,C;;;;;;;;;;;AC9rBP,IAAM2K,YAAYsb,mBAAOA,CAAC,iCAAR,CAAlB;;AAEAC,OAAOC,OAAP,GAAiBxb,UAAUyb,OAAV,GAAoBzb,UAAUyb,OAA9B,GAAwCzb,SAAzD,C;;;;;;;;;;;;;;;;;ACFA;;;;;;IAMM8J,iB;;;;AACF;;;;;;;yCAOyBnH,M,EAAQ;AAC7B,gBAAI,CAACA,MAAL,EAAa;AACT,uBAAOmH,kBAAkB4R,oBAAzB;AACH;AACD,mBAAO9jB,OAAOgH,MAAP,CAAckL,kBAAkB4R,oBAAhC,EAAsD/Y,MAAtD,CAAP;AACH;;AAED;;;;;;;;;AAMA,+BAAatN,KAAb,EAAoB;AAAA;;AAChB,aAAKsmB,MAAL,GAActmB,KAAd;AACH;;AAED;;;;;;;;;;gCAMS;AACL,mBAAO,KAAKsmB,MAAZ;AACH;;AAED;;;;;;;;;mCAMY;AACR,mBAAO/L,OAAO,KAAK+L,MAAZ,CAAP;AACH;;;kCAEgBjM,G,EAAK;AAClB,mBAAQA,eAAe5F,iBAAhB,IAAsC,CAAC,CAACA,kBAAkBC,gBAAlB,GAAqC2F,GAArC,CAA/C;AACH;;;uCAEqBA,G,EAAK;AACvB,mBAAOA,eAAe5F,iBAAf,GAAmC4F,GAAnC,GAAyC5F,kBAAkBC,gBAAlB,GAAqC2F,GAArC,CAAhD;AACH;;;;;;AAGL;;;;;AAGA5F,kBAAkB8R,IAAlB,GAAyB,IAAI9R,iBAAJ,CAAsB,MAAtB,CAAzB;AACAA,kBAAkBoG,EAAlB,GAAuB,IAAIpG,iBAAJ,CAAsB,IAAtB,CAAvB;AACAA,kBAAkB+R,GAAlB,GAAwB,IAAI/R,iBAAJ,CAAsB,KAAtB,CAAxB;;AAEA;;;;;AAKAA,kBAAkB4R,oBAAlB,GAAyC;AACrCI,aAAShS,kBAAkBoG,EADU;AAErC6L,SAAKjS,kBAAkB+R,GAFc;AAGrCG,UAAMlS,kBAAkB8R,IAHa;AAIrC7Z,eAAW+H,kBAAkBoG;AAJQ,CAAzC;;AAOepG,gFAAf,E;;;;;;;;;;;;;;;;;;AC/EA;AACA;;AAEA,IAAMmS,kBAAkB,SAAlBA,eAAkB,CAACC,OAAD,EAAU7Z,KAAV,EAAiBC,GAAjB,EAAyB;AAC7C,QAAM6Z,UAAU,EAAhB;AACA,QAAI5a,OAAOc,KAAX;;AAEA,WAAOd,OAAOe,GAAd,EAAmB;AACf6Z,gBAAQnkB,IAAR,CAAauJ,IAAb;AACAA,gBAAQ2a,OAAR;AACH;AACDC,YAAQnkB,IAAR,CAAauJ,IAAb;;AAEA,WAAO4a,OAAP;AACH,CAXD;;AAaA,IAAMC,kBAAkB,SAAlBA,eAAkB,CAACC,YAAD,EAAehnB,KAAf,EAAyB;AAC7C,QAAIinB,UAAU,CAAd;AACA,QAAIC,WAAWF,aAAajkB,MAAb,GAAsB,CAArC;AACA,QAAIokB,eAAJ;AACA,QAAI1G,cAAJ;;AAEA;AACA,WAAOwG,WAAWC,QAAlB,EAA4B;AACxBC,iBAASF,UAAUhL,KAAKmL,KAAL,CAAW,CAACF,WAAWD,OAAZ,IAAuB,CAAlC,CAAnB;AACAxG,gBAAQuG,aAAaG,MAAb,CAAR;;AAEA,YAAInnB,SAASygB,MAAMzT,KAAf,IAAwBhN,QAAQygB,MAAMxT,GAA1C,EAA+C;AAC3C,mBAAOwT,KAAP;AACH,SAFD,MAEO,IAAIzgB,SAASygB,MAAMxT,GAAnB,EAAwB;AAC3Bga,sBAAUE,SAAS,CAAnB;AACH,SAFM,MAEA,IAAInnB,QAAQygB,MAAMzT,KAAlB,EAAyB;AAC5Bka,uBAAWC,SAAS,CAApB;AACH;AACJ;;AAED,WAAO,IAAP;AACH,CArBD;;AAuBC;;;;;;;;AAQM,SAASp
U,qBAAT,CAAgCD,YAAhC,EAA8CnG,UAA9C,EAA0DW,MAA1D,EAAkE;AAAA,QAC/DwZ,OAD+D,GACnBxZ,MADmB,CAC/DwZ,OAD+D;AAAA,QACtDO,SADsD,GACnB/Z,MADmB,CACtD+Z,SADsD;AAAA,QAC3CR,OAD2C,GACnBvZ,MADmB,CAC3CuZ,OAD2C;AAAA,QAClC7Z,KADkC,GACnBM,MADmB,CAClCN,KADkC;AAAA,QAC3BC,GAD2B,GACnBK,MADmB,CAC3BL,GAD2B;;AAAA,+BAEhD6F,aAAawF,MAAb,EAFgD;AAAA;AAAA,QAE9DgP,IAF8D;AAAA,QAExDC,IAFwD;;AAIrE,QAAI,CAACT,OAAL,EAAc;AACV9Z,gBAASA,UAAU,CAAV,KAAgB,CAACA,KAAD,IAAUA,QAAQsa,IAAlC,CAAD,GAA4CA,IAA5C,GAAmDta,KAA3D;AACAC,cAAOA,QAAQ,CAAR,KAAc,CAACA,GAAD,IAAQA,MAAMsa,IAA5B,CAAD,GAAuCA,OAAO,CAA9C,GAAmDta,GAAzD;;AAEA,YAAIoa,SAAJ,EAAe;AACXR,sBAAU5K,KAAKuL,IAAL,CAAUvL,KAAKwL,GAAL,CAASxa,MAAMD,KAAf,IAAwBqa,SAAlC,CAAV;AACH;;AAEDP,kBAAUF,gBAAgBC,OAAhB,EAAyB7Z,KAAzB,EAAgCC,GAAhC,CAAV;AACH;;AAED,QAAI6Z,QAAQ,CAAR,IAAaQ,IAAjB,EAAuB;AACnBR,gBAAQY,OAAR,CAAgBJ,IAAhB;AACH;AACD,QAAIR,QAAQA,QAAQ/jB,MAAR,GAAiB,CAAzB,KAA+BwkB,IAAnC,EAAyC;AACrCT,gBAAQnkB,IAAR,CAAa4kB,OAAO,CAApB;AACH;;AAED,QAAMP,eAAe,EAArB;AACA,SAAK,IAAInlB,IAAI,CAAb,EAAgBA,IAAIilB,QAAQ/jB,MAAR,GAAiB,CAArC,EAAwClB,GAAxC,EAA6C;AACzCmlB,qBAAarkB,IAAb,CAAkB;AACdqK,mBAAO8Z,QAAQjlB,CAAR,CADO;AAEdoL,iBAAK6Z,QAAQjlB,IAAI,CAAZ;AAFS,SAAlB;AAIH;;AAED,QAAMmR,aAAa,EAAnB;AACAhC,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM2K,QAAQsG,aAAa3D,YAAb,CAA0BhH,IAA1B,CAA+BtG,CAA/B,CAAd;AACA,YAAI2K,iBAAiBiI,4DAArB,EAAwC;AACpCzB,uBAAWrQ,IAAX,CAAgB6J,KAAhB;AACA;AACH;;AAED,YAAMiU,QAAQsG,gBAAgBC,YAAhB,EAA8Bxa,KAA9B,CAAd;AACAwG,mBAAWrQ,IAAX,CAAmB8d,MAAMzT,KAAzB,SAAkCyT,MAAMxT,GAAxC;AACH,KATD;;AAWA,WAAO,EAAE+F,sBAAF,EAAcC,MAAM6T,OAApB,EAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;AC1FD;AACA;;AAEA;;;;;;;;;;;;;;AAcA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CO,IAAM9Q,SAAS,SAATA,MAAS;AAAA,sCAAIpL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG3R,MAAH,WAAapL,IAAb,CAAN;AAAA,KAAb;AAAA,CAAf;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BO,IAAMqL,UAAU,SAAVA,OAAU;AAAA,uCAAIrL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG1R,OAAH,WAAcrL,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;AAyDO,IAAMmL,MAAM,SAANA,GAAM;AAAA,uCAAInL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG5R,GAAH,WAAUnL,IAAV,CAAN;AAAA,KAAb;AAAA,CAAZ;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BO,IAAM+C,UAAU,SAAVA,OAAU;AAAA,uCAAI/C,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAGha,OAAH,WAAc/C,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCO,IAAMkL,UAAU,SAAVA,OAAU;AAAA,uCAAI8R,UAAJ;AAAIA,kBAAJ;AAAA;;AAAA,WACnB,UAACD,EAAD,EAAsC;AAAA,YAAjCra,MAAiC,uEAAxB,EAAEC,WAAW,IAAb,EAAwB;;AAClC,YAAIsa,YAAYF,EAAhB;AACA,YAAIG,mBAAJ;AACA,YAAM1E,cAAc,EAApB;;AAEAwE,mBAAWnlB,OAAX,CAAmB,UAAC0a,SAAD,EAAe;AAC9B0K,wBAAY1K,UAAU0K,SAAV,CAAZ;AACAzE,wBAAYzgB,IAAZ,uCAAoBklB,UAAUxK,WAA9B;AACA,gBAAI,CAACyK,UAAL,EAAiB;AACbA,6BAAaD,SAAb;AACH;AACJ,SAND;;AAQA,YAAIC,cAAcA,eAAeD,SAAjC,EAA4C;AACxCC,uBAAWC,OAAX;AACH;;AAED;AACAF,kBAAUjK,mBAAV,GAAgC,EAAhC;AACAhQ,0EAAkBA,CACd+Z,EADJ,EAEIE,SAFJ,EAGI/hB,yDAAcA,CAACI,OAHnB,EAII,IAJJ,EAKIkd,WALJ;;AAQA,YAAI9V,OAAOC,SAAX,EAAsB;AAClBsa,sBAAU9Z,SAAV,CAAoB4Z,EAApB;AACH,SAFD,MAEO;AACHE,sBAAU9Z,SAAV,CAAoB,IAApB;AACH;;AAED,eAAO8Z,SAAP;AACH,KAnCkB;AAAA,CAAhB,C;;;;;;;;;;;;ACvNP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AAKA,SAASG,eAAT,GAA2B;AAAE,WAAO,IAAP;AAAc;;AAE3C;;;;;;;;;;AAUO,SAAS7R,YAAT,CAAuB8R,GAAvB,EAA4BC,GAA5B,EAAiC1D,QAAjC,EAAgG;AAAA,QAArD2D,mBAAqD,uEAA/B,KAA+B;AAAA,QAAxBC,QAAwB,uEAAb9hB,gDAAKA,CAACC,KAAO;;AACnG,QAAM6B,SAAS,EAAf;AACA,QAAMD,OAAO,EAAb;AACA,QAAMkgB,qBAAqB7D,YAAYwD,eAAvC;AACA,QAAMM,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAMia,oBAAoBF,cAAc1mB,IAAxC;AACA,QAAM6mB,oBAAoBF,cAAc3mB,IAAxC;AACA,QAAMA,OAAU0mB,cAAc1mB,IAAxB,SAAgC2mB,cAAc3mB,IAApD;AACA,QAAM8mB,mBAAmBC,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAzB;;AAEA,QAAIC,sBAAsBC,iBAA1B,EAA6C;AACzC,cAAM,IAAI5f,KAAJ,CAAU,4CAAV,CAAN;AACH;AACD;AACAyf,kBAAcve,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAM4e,YAAY1G,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAIsgB,iBAAiBtc,OAAjB,CAAyBwc,UAAUhnB,IAAnC,MAA6C,CAAC,CAA9C,IA
AmD,CAACumB,mBAAxD,EAA6E;AACzES,sBAAUhnB,IAAV,GAAoB0mB,cAAc1mB,IAAlC,SAA0CgnB,UAAUhnB,IAApD;AACH;AACDwG,eAAOzF,IAAP,CAAYimB,SAAZ;AACH,KAND;AAOAL,kBAAcxe,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAM4e,YAAY1G,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAIsgB,iBAAiBtc,OAAjB,CAAyBwc,UAAUhnB,IAAnC,MAA6C,CAAC,CAAlD,EAAqD;AACjD,gBAAI,CAACumB,mBAAL,EAA0B;AACtBS,0BAAUhnB,IAAV,GAAoB2mB,cAAc3mB,IAAlC,SAA0CgnB,UAAUhnB,IAApD;AACAwG,uBAAOzF,IAAP,CAAYimB,SAAZ;AACH;AACJ,SALD,MAKO;AACHxgB,mBAAOzF,IAAP,CAAYimB,SAAZ;AACH;AACJ,KAVD;;AAYA;AACA5X,oFAAkBA,CAACiX,IAAIzc,WAAvB,EAAoC,UAAC3J,CAAD,EAAO;AACvC,YAAIgnB,WAAW,KAAf;AACA,YAAIC,oBAAJ;AACA9X,wFAAkBA,CAACkX,IAAI1c,WAAvB,EAAoC,UAACud,EAAD,EAAQ;AACxC,gBAAMC,QAAQ,EAAd;AACA,gBAAMC,UAAU,EAAhB;AACAA,oBAAQT,iBAAR,IAA6B,EAA7B;AACAS,oBAAQR,iBAAR,IAA6B,EAA7B;AACAH,0BAAcve,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpCgf,sBAAMrmB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAX;AACAonB,wBAAQT,iBAAR,EAA2Bxe,MAAMpI,IAAN,EAA3B,IAA2C;AACvC8a,8BAAU1S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAD6B;AAEvC4a,oCAAgBzS,MAAMyE,aAAN,GAAsB5M,CAAtB;AAFuB,iBAA3C;AAIH,aAND;AAOA0mB,0BAAcxe,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,oBAAI,EAAE0e,iBAAiBtc,OAAjB,CAAyBpC,MAAM5B,MAAN,GAAexG,IAAxC,MAAkD,CAAC,CAAnD,IAAwDumB,mBAA1D,CAAJ,EAAoF;AAChFa,0BAAMrmB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwB4gB,EAAxB,CAAX;AACH;AACDE,wBAAQR,iBAAR,EAA2Bze,MAAMpI,IAAN,EAA3B,IAA2C;AACvC8a,8BAAU1S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwB4gB,EAAxB,CAD6B;AAEvCtM,oCAAgBzS,MAAMyE,aAAN,GAAsBsa,EAAtB;AAFuB,iBAA3C;AAIH,aARD;;AAUA,gBAAInY,cAAc,EAAlB;AACA,gBAAIsY,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAInX,YAAJ,EAAN;AAAA,aAArB;AACA,gBAAIqY,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAIpX,YAAJ,EAAN;AAAA,aAArB;;AAEA,gBAAMsY,YAAY5M,+DAAeA,CAACyM,QAAQT,iBAAR,CAAhB,CAAlB;AACA,gBAAMa,YAAY7M,+DAAeA,CAACyM,QAAQR,iBAAR,CAAhB,CAAlB;AACA,gBAAIJ,mBAAmBe,SAAnB,EAA8BC,SAA9B,EAAyCH,cAAzC,EAAyDC,cAAzD,EAAyEvY,WAAzE,CAAJ,EAA2F;AACvF,oBAAM0Y,WAAW,EAAjB;AACAN,sBAAMvmB,OAAN,CAAc,UAAC8mB,OAAD,EAAUC,GAAV,E
AAkB;AAC5BF,6BAASlhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B2nB,OAA7B;AACH,iBAFD;AAGA,oBAAIV,YAAYviB,gDAAKA,CAACC,KAAN,KAAgB6hB,QAAhC,EAA0C;AACtCjgB,yBAAK2gB,WAAL,IAAoBQ,QAApB;AACH,iBAFD,MAGK;AACDnhB,yBAAKxF,IAAL,CAAU2mB,QAAV;AACAT,+BAAW,IAAX;AACAC,kCAAcjnB,CAAd;AACH;AACJ,aAbD,MAaO,IAAI,CAACumB,aAAa9hB,gDAAKA,CAACE,SAAnB,IAAgC4hB,aAAa9hB,gDAAKA,CAACG,UAApD,KAAmE,CAACoiB,QAAxE,EAAkF;AACrF,oBAAMS,YAAW,EAAjB;AACA,oBAAI7J,MAAM6I,cAAcve,MAAd,CAAqBhH,MAArB,GAA8B,CAAxC;AACAimB,sBAAMvmB,OAAN,CAAc,UAAC8mB,OAAD,EAAUC,GAAV,EAAkB;AAC5B,wBAAIA,OAAO/J,GAAX,EAAgB;AACZ6J,kCAASlhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B2nB,OAA7B;AACH,qBAFD,MAGK;AACDD,kCAASlhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B,IAA7B;AACH;AACJ,iBAPD;AAQAinB,2BAAW,IAAX;AACAC,8BAAcjnB,CAAd;AACAsG,qBAAKxF,IAAL,CAAU2mB,SAAV;AACH;AACJ,SAxDD;AAyDH,KA5DD;;AA8DA,WAAO,IAAI3e,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;ACzHD;AACA;;AAEA;;;;;;;;;;;AAWO,SAAS0J,WAAT,CAAsBgM,UAAtB,EAAkC3K,UAAlC,EAA8CkQ,aAA9C,EAA6D7O,cAA7D,EAA6E3F,OAA7E,EAAsF;AACzF,QAAMyC,aAAa;AACfc,gBAAQ,KADO;AAEfD,oBAAY;AAFG,KAAnB;AAIAtD,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;;AAEA,QAAMohB,SAAS;AACXrhB,gBAAQ,EADG;AAEXD,cAAM,EAFK;AAGX0D,cAAM;AAHK,KAAf;AAKA,QAAMD,SAASvD,QAAQuD,MAAvB;AACA,QAAM8d,aAAa1b,kBAAkBA,eAAejL,MAAf,GAAwB,CAA7D;AACA;AACA,QAAM4mB,aAAa,EAAnB;AACA;AACA,QAAMC,UAAU/M,cAAc/P,KAAd,CAAoB,GAApB,CAAhB;;AAEA8c,YAAQnnB,OAAR,CAAgB,UAAConB,OAAD,EAAa;AACzB,aAAK,IAAIhoB,IAAI,CAAb,EAAgBA,IAAIyV,WAAWvU,MAA/B,EAAuClB,KAAK,CAA5C,EAA+C;AAC3C,gBAAIyV,WAAWzV,CAAX,EAAcD,IAAd,OAAyBioB,OAA7B,EAAsC;AAClCF,2BAAWhnB,IAAX,CAAgB2U,WAAWzV,CAAX,CAAhB;AACA;AACH;AACJ;AACJ,KAPD;;AASA;AACA8nB,eAAWlnB,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B;AACAyf,eAAOrhB,MAAP,CAAczF,IAAd,CAAmBqH,MAAM5B,MAAN,EAAnB;AACH,KAHD;;AAKA,QAAIwD,MAAJ,EAAY;AACR6d,eAAOrhB,MAAP,CAAczF,IAAd,CAAmB;AACff,kBAAM,KADS;AAEf8F,kBAAM;AAFS,SAAnB;AAIH;;AAEDsJ,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC4nB,eAAOthB,IAAP,CAAYxF,IAAZ,CAAiB,EAAjB;AACA,YAAMmnB,YAAYL,OAAOthB,IAAP,C
AAYpF,MAAZ,GAAqB,CAAvC;AACA,YAAIiK,QAAQ,CAAZ;AACA2c,mBAAWlnB,OAAX,CAAmB,UAACuH,KAAD,EAAQ+e,EAAR,EAAe;AAC9BU,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBf,KAAK/b,KAA5B,IAAqChD,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAArC;AACH,SAFD;AAGA,YAAI+J,MAAJ,EAAY;AACR6d,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBH,WAAW5mB,MAAlC,IAA4ClB,CAA5C;AACH;AACD;AACA4nB,eAAO5d,IAAP,CAAYlJ,IAAZ,CAAiBd,CAAjB;;AAEA;AACA;AACA,YAAI6nB,UAAJ,EAAgB;AAAED,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBnnB,IAAvB,CAA4Bd,CAA5B;AAAiC;AACtD,KAhBD;;AAkBA;AACA,QAAI6nB,UAAJ,EAAgB;AACZK,8DAAQA,CAACN,MAAT,EAAiBzb,cAAjB;AACH;;AAED,QAAI3F,QAAQsD,UAAZ,EAAwB;AACpB,YAAMqe,UAAUhnB,0CAASA,MAAMymB,OAAOrhB,MAAP,CAAcrF,MAApB,CAAT,GAAsCpB,GAAtC,CAA0C;AAAA,mBAAM,EAAN;AAAA,SAA1C,CAAhB;AACA8nB,eAAOthB,IAAP,CAAY1F,OAAZ,CAAoB,UAACumB,KAAD,EAAW;AAC3BA,kBAAMvmB,OAAN,CAAc,UAAC0F,IAAD,EAAOtG,CAAP,EAAa;AACvBmoB,wBAAQnoB,CAAR,EAAWc,IAAX,CAAgBwF,IAAhB;AACH,aAFD;AAGH,SAJD;AAKAshB,eAAOthB,IAAP,GAAc6hB,OAAd;AACH;;AAED,WAAOP,MAAP;AACH,C;;;;;;;;;;;;ACzFD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;;;AASO,SAASrT,UAAT,CAAqB6R,GAArB,EAA0BC,GAA1B,EAA+B;AAClC,QAAM+B,YAAY,EAAlB;AACA,QAAM7hB,SAAS,EAAf;AACA,QAAM8hB,gBAAgB,EAAtB;AACA,QAAM/hB,OAAO,EAAb;AACA,QAAMmgB,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAM4b,wBAAwB7B,cAAclZ,SAAd,EAA9B;AACA,QAAMgb,wBAAwB7B,cAAcnZ,SAAd,EAA9B;AACA,QAAMxN,OAAU0mB,cAAc1mB,IAAxB,eAAsC2mB,cAAc3mB,IAA1D;;AAED;AACC,QAAI,CAACyoB,gEAAUA,CAACpC,IAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiD+c,IAAIxc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACC8c,QAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQmgB,sBAAsBrb,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAZ;AACA8hB,sBAAcvnB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;;AAOA,aAAS0oB,iBAAT,CAA2B3C,EAA3B,EAA+BvY,SAA/B,EAA0Cmb,OAA1C,EAAmD;AAC/CvZ,wFAAkBA,CAAC2W,GAAGnc,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAMmnB,QA
AQ,EAAd;AACA,gBAAIwB,WAAW,EAAf;AACAN,0BAAcznB,OAAd,CAAsB,UAACgoB,UAAD,EAAgB;AAClC,oBAAMzqB,QAAQoP,UAAUqb,UAAV,EAAsBtb,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACA2oB,kCAAgBxqB,KAAhB;AACAgpB,sBAAMyB,UAAN,IAAoBzqB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACiqB,UAAUO,QAAV,CAAL,EAA0B;AACtB,oBAAID,OAAJ,EAAa;AAAEpiB,yBAAKxF,IAAL,CAAUqmB,KAAV;AAAmB;AAClCiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAkBpC,GAAlB,EAAuBkC,qBAAvB,EAA8C,KAA9C;AACAE,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB,EAA8C,IAA9C;;AAEA,WAAO,IAAIxf,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;ACjED;AAAA;AAAA;;;;;;;;AAQO,SAAS+mB,eAAT,CAA0B+B,GAA1B,EAA+BC,GAA/B,EAAoC;AACvC,QAAMC,SAAS,EAAf;AACA,QAAMC,SAAS,EAAf;AACAH,QAAI3gB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B6gB,eAAOloB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH,KAFD;AAGA+oB,QAAI5gB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B,YAAI6gB,OAAOze,OAAP,CAAepC,MAAM5B,MAAN,GAAexG,IAA9B,MAAwC,CAAC,CAA7C,EAAgD;AAC5CgpB,mBAAOjoB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH;AACJ,KAJD;AAKA,WAAOgpB,MAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;;;;ACpBD;AACA;AACA;;IAEQzV,G,GAAgDD,yD,CAAhDC,G;IAAKC,G,GAA2CF,yD,CAA3CE,G;IAAKG,K,GAAsCL,yD,CAAtCK,K;IAAOC,I,GAA+BN,yD,CAA/BM,I;IAAMC,K,GAAyBP,yD,CAAzBO,K;IAAOC,G,GAAkBR,yD,CAAlBQ,G;IAAKL,G,GAAaH,yD,CAAbG,G;IAAKC,G,GAAQJ,yD,CAARI,G;;;AAEhD,SAASwV,iBAAT,CAA2B7hB,GAA3B,EAAgC;AAC5B,WAAOA,IAAIwS,MAAJ,CAAW;AAAA,eAAQ,EAAEhR,gBAAgBgK,4DAAlB,CAAR;AAAA,KAAX,CAAP;AACH;AACD;;;;;;;AAOA,SAASsW,GAAT,CAAc9hB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMgoB,iBAAiBF,kBAAkB7hB,GAAlB,CAAvB;AACA,YAAMgiB,WAAWD,eAAejoB,MAAf,GACGioB,eAAephB,MAAf,CAAsB,UAACC,GAAD,EAAMqhB,IAAN;AAAA,mBAAerhB,MAAMqhB,IAArB;AAAA,SAAtB,EAAiD,CAAjD,CADH,GAEKzW,4DAAiBA,CAAC8R,IAFxC;AAGA,eAAO0E,QAAP;AACH;AACD,WAAOxW,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS4E,GAAT,CAAcliB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMioB,WAAWF,IAAI9hB,GAAJ,CAAjB;AACA,YAAMw
W,MAAMxW,IAAIlG,MAAJ,IAAc,CAA1B;AACA,eAAQgK,OAAO3M,KAAP,CAAa6qB,QAAb,KAA0BA,oBAAoBxW,4DAA/C,GACEA,4DAAiBA,CAAC8R,IADpB,GAC2B0E,WAAWxL,GAD7C;AAEH;AACD,WAAOhL,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS7N,GAAT,CAAczP,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAMooB,iBAAiBN,kBAAkB7hB,GAAlB,CAAvB;;AAEA,eAAQmiB,eAAeroB,MAAhB,GAA0BkZ,KAAKvD,GAAL,gCAAY0S,cAAZ,EAA1B,GAAwD3W,4DAAiBA,CAAC8R,IAAjF;AACH;AACD,WAAO9R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS3N,GAAT,CAAc3P,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAMooB,iBAAiBN,kBAAkB7hB,GAAlB,CAAvB;;AAEA,eAAQmiB,eAAeroB,MAAhB,GAA0BkZ,KAAKrD,GAAL,gCAAYwS,cAAZ,EAA1B,GAAwD3W,4DAAiBA,CAAC8R,IAAjF;AACH;AACD,WAAO9R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS8E,KAAT,CAAgBpiB,GAAhB,EAAqB;AACjB,WAAOA,IAAI,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAASqiB,IAAT,CAAeriB,GAAf,EAAoB;AAChB,WAAOA,IAAIA,IAAIlG,MAAJ,GAAa,CAAjB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASwoB,KAAT,CAAgBtiB,GAAhB,EAAqB;AACjB,QAAIC,sDAAOA,CAACD,GAAR,CAAJ,EAAkB;AACd,eAAOA,IAAIlG,MAAX;AACH;AACD,WAAO0R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;AAMA,SAASiF,QAAT,CAAmBviB,GAAnB,EAAwB;AACpB,QAAIwiB,OAAON,IAAIliB,GAAJ,CAAX;AACA,WAAOkiB,IAAIliB,IAAItH,GAAJ,CAAQ;AAAA,wBAAQ+pB,MAAMD,IAAd,EAAuB,CAAvB;AAAA,KAAR,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,GAAT,CAAc1iB,GAAd,EAAmB;AACf,WAAOgT,KAAK2P,IAAL,CAAUJ,SAASviB,GAAT,CAAV,CAAP;AACH;;AAGD,IAAM4iB,iDACD1W,GADC,EACK4V,GADL,4BAED3V,GAFC,EAEK+V,GAFL,4BAGD9V,GAHC,EAGKqD,GAHL,4BAIDpD,GAJC,EAIKsD,GAJL,4BAKDrD,KALC,EAKO8V,KALP,4BAMD7V,IANC,EAMM8V,IANN,4BAOD7V,KAPC,EAOO8V,KAPP,4BAQD7V,GARC,EAQKiW,GARL,WAAN;;AAWA,IAAMzR,qBAAqB/E,GAA3B;;;;;;;;;;;;;;;;;;;;;;;;;;ACpJA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAAS2W,WAAT,CAAsB3I,SAAtB,EAAiC3L,QAAjC,EAA2C;AACvC,QAAMoT,SAAS,EAAf;AACA,QAAMtT,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAMwd,aAAazU,WAAWO,YAAX,EAAnB;;AAEAtV,WAAOga,OAAP,CAAewP,UAAf,EAA2BtpB,OAA3B,CAAmC,gBAAW;AAAA;AAAA,YAAT1C,GAAS;;AAC1C,YAAIyX,YAAY
A,SAASzU,MAAzB,EAAiC;AAC7B,gBAAIyU,SAASpL,OAAT,CAAiBrM,GAAjB,MAA0B,CAAC,CAA/B,EAAkC;AAC9B6qB,uBAAOjoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,SAJD,MAIO;AACH6qB,mBAAOjoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,KARD;;AAUA,WAAO6qB,MAAP;AACH;;AAED;;;;;;;AAOA,SAASoB,aAAT,CAAwB7I,SAAxB,EAAkD;AAAA,QAAf9V,QAAe,uEAAJ,EAAI;;AAC9C,QAAMoc,SAAS,EAAf;AACA,QAAMnS,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAM0d,WAAW3U,WAAWK,UAAX,EAAjB;AACA,QAAMuU,aAAape,4DAAYA,CAACD,cAAb,EAAnB;;AAEAtL,WAAOmI,IAAP,CAAYuhB,QAAZ,EAAsBxpB,OAAtB,CAA8B,UAAC0pB,WAAD,EAAiB;AAC3C,YAAI,OAAO9e,SAAS8e,WAAT,CAAP,KAAiC,QAArC,EAA+C;AAC3C9e,qBAAS8e,WAAT,IAAwBF,SAASE,WAAT,EAAsBlS,QAAtB,EAAxB;AACH;AACD,YAAMtG,YAAY7F,4DAAYA,CAACse,OAAb,CAAqB/e,SAAS8e,WAAT,CAArB,CAAlB;AACA,YAAIxY,SAAJ,EAAe;AACX8V,mBAAO0C,WAAP,IAAsBxY,SAAtB;AACH,SAFD,MAEO;AACH8V,mBAAO0C,WAAP,IAAsBD,UAAtB;AACA7e,qBAAS8e,WAAT,IAAwBjS,qEAAxB;AACH;AACJ,KAXD;AAYA,WAAOuP,MAAP;AACH;;AAED;;;;;;;;;AASA,SAAS9b,OAAT,CAAkBwV,SAAlB,EAA6B3L,QAA7B,EAAuCnK,QAAvC,EAAiDgf,iBAAjD,EAAoE;AAChE,QAAMC,YAAYR,YAAY3I,SAAZ,EAAuB3L,QAAvB,CAAlB;AACA,QAAM+U,aAAaP,cAAc7I,SAAd,EAAyB9V,QAAzB,CAAnB;AACA,QAAMiK,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAMuQ,gBAAgBxH,WAAWlI,SAAX,EAAtB;AACA,QAAMod,SAASlV,WAAW1V,IAA1B;AACA,QAAM8R,eAAe,EAArB;AACA,QAAM+Y,aAAa,EAAnB;AACA,QAAMrkB,SAAS,EAAf;AACA,QAAMskB,UAAU,EAAhB;AACA,QAAMvkB,OAAO,EAAb;AACA,QAAIuF,qBAAJ;;AAEA;AACAnL,WAAOga,OAAP,CAAeuC,aAAf,EAA8Brc,OAA9B,CAAsC,iBAAkB;AAAA;AAAA,YAAhB1C,GAAgB;AAAA,YAAXC,KAAW;;AACpD,YAAIssB,UAAUlgB,OAAV,CAAkBrM,GAAlB,MAA2B,CAAC,CAA5B,IAAiCwsB,WAAWxsB,GAAX,CAArC,EAAsD;AAClDqI,mBAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYliB,MAAMoI,MAAN,EAAZ,CAAZ;;AAEA,oBAAQpI,MAAMoI,MAAN,GAAeV,IAAvB;AACA,qBAAKyL,gDAASA,CAAC4B,OAAf;AACI0X,+BAAW9pB,IAAX,CAAgB5C,GAAhB;AACA;AACJ;AACA,qBAAKoT,gDAASA,CAACC,SAAf;AACIM,iCAAa/Q,IAAb,CAAkB5C,GAAlB;AANJ;AAQH;AACJ,KAbD;AAcA;AACA,QAAI4sB,WAAW,CAAf;AACA3b,oFAAkBA,CAACmS,UAAU3X,WAA7B,EAA0C,UAAC3J,CAAD,EAAO;AAC7C,YAAIuW,OAAO,EAAX;AACA1E,qBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBiL,mBAAUA,IAAV,SAAkB0G,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,
CAAnC,CAAlB;AACH,SAFD;AAGA,YAAI6qB,QAAQtU,IAAR,MAAkB1L,SAAtB,EAAiC;AAC7BggB,oBAAQtU,IAAR,IAAgBuU,QAAhB;AACAxkB,iBAAKxF,IAAL,CAAU,EAAV;AACA+Q,yBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBhF,qBAAKwkB,QAAL,EAAexf,CAAf,IAAoB2R,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAApB;AACH,aAFD;AAGA4qB,uBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAKwkB,QAAL,EAAexf,CAAf,IAAoB,CAAC2R,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAD,CAApB;AACH,aAFD;AAGA8qB,wBAAY,CAAZ;AACH,SAVD,MAUO;AACHF,uBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAKukB,QAAQtU,IAAR,CAAL,EAAoBjL,CAApB,EAAuBxK,IAAvB,CAA4Bmc,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAA5B;AACH,aAFD;AAGH;AACJ,KApBD;;AAsBA;AACA,QAAI+O,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAMsS,UAAUrS,YAAV,EAAN;AAAA,KAApB;AACA3I,SAAK1F,OAAL,CAAa,UAACN,GAAD,EAAS;AAClB,YAAM6mB,QAAQ7mB,GAAd;AACAsqB,mBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtB6b,kBAAM7b,CAAN,IAAWof,WAAWpf,CAAX,EAAchL,IAAIgL,CAAJ,CAAd,EAAsB0D,aAAtB,EAAqCD,WAArC,CAAX;AACH,SAFD;AAGH,KALD;AAMA,QAAIyb,iBAAJ,EAAuB;AACnBA,0BAAkBxc,qBAAlB;AACAnC,uBAAe2e,iBAAf;AACH,KAHD,MAIK;AACD3e,uBAAe,IAAI/C,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,MAAM4qB,MAAR,EAA5B,CAAf;AACH;AACD,WAAO9e,YAAP;AACH;;;;;;;;;;;;;;AC1ID;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;ACbA;AAAA;AAAA;;;;;;;AAOA,SAASkf,SAAT,CAAoBlR,CAApB,EAAuBC,CAAvB,EAA0B;AACtB,QAAMkR,UAAQnR,CAAd;AACA,QAAMoR,UAAQnR,CAAd;AACA,QAAIkR,KAAKC,EAAT,EAAa;AACT,eAAO,CAAC,CAAR;AACH;AACD,QAAID,KAAKC,EAAT,EAAa;AACT,eAAO,CAAP;AACH;AACD,WAAO,CAAP;AACH;;AAED;;;;;;;;;;AAUA,SAASC,KAAT,CAAgB9jB,GAAhB,EAAqB+jB,EAArB,EAAyBC,GAAzB,EAA8BC,EAA9B,EAAkCC,MAAlC,EAA0C;AACtC,QAAMC,
UAAUnkB,GAAhB;AACA,QAAMokB,SAAS,EAAf;AACA,SAAK,IAAIxrB,IAAImrB,EAAb,EAAiBnrB,KAAKqrB,EAAtB,EAA0BrrB,KAAK,CAA/B,EAAkC;AAC9BwrB,eAAOxrB,CAAP,IAAYurB,QAAQvrB,CAAR,CAAZ;AACH;AACD,QAAI6Z,IAAIsR,EAAR;AACA,QAAIrR,IAAIsR,MAAM,CAAd;;AAEA,SAAK,IAAIprB,KAAImrB,EAAb,EAAiBnrB,MAAKqrB,EAAtB,EAA0BrrB,MAAK,CAA/B,EAAkC;AAC9B,YAAI6Z,IAAIuR,GAAR,EAAa;AACTG,oBAAQvrB,EAAR,IAAawrB,OAAO1R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHD,MAGO,IAAIA,IAAIuR,EAAR,EAAY;AACfE,oBAAQvrB,EAAR,IAAawrB,OAAO3R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA,IAAIyR,OAAOE,OAAO3R,CAAP,CAAP,EAAkB2R,OAAO1R,CAAP,CAAlB,KAAgC,CAApC,EAAuC;AAC1CyR,oBAAQvrB,EAAR,IAAawrB,OAAO3R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA;AACH0R,oBAAQvrB,EAAR,IAAawrB,OAAO1R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH;AACJ;AACJ;;AAED;;;;;;;;;;AAUA,SAASxQ,IAAT,CAAelC,GAAf,EAAoB+jB,EAApB,EAAwBE,EAAxB,EAA4BC,MAA5B,EAAoC;AAChC,QAAID,OAAOF,EAAX,EAAe;AAAE,eAAO/jB,GAAP;AAAa;;AAE9B,QAAMgkB,MAAMD,KAAK/Q,KAAKmL,KAAL,CAAW,CAAC8F,KAAKF,EAAN,IAAY,CAAvB,CAAjB;AACA7hB,SAAKlC,GAAL,EAAU+jB,EAAV,EAAcC,GAAd,EAAmBE,MAAnB;AACAhiB,SAAKlC,GAAL,EAAUgkB,MAAM,CAAhB,EAAmBC,EAAnB,EAAuBC,MAAvB;AACAJ,UAAM9jB,GAAN,EAAW+jB,EAAX,EAAeC,GAAf,EAAoBC,EAApB,EAAwBC,MAAxB;;AAEA,WAAOlkB,GAAP;AACH;;AAED;;;;;;;;;AASO,SAASqkB,SAAT,CAAoBrkB,GAApB,EAA6C;AAAA,QAApBkkB,MAAoB,uEAAXP,SAAW;;AAChD,QAAI3jB,IAAIlG,MAAJ,GAAa,CAAjB,EAAoB;AAChBoI,aAAKlC,GAAL,EAAU,CAAV,EAAaA,IAAIlG,MAAJ,GAAa,CAA1B,EAA6BoqB,MAA7B;AACH;AACD,WAAOlkB,GAAP;AACH,C;;;;;;;;;;;;AC1FD;AAAA;AAAA;AAAA;;AAEA;;;;;;;;;AASO,SAASskB,iBAAT,CAA4BtF,GAA5B,EAAiCC,GAAjC,EAAsC;AACzC,QAAMI,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA;AACA;AACA,QAAMif,kBAAkB7E,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAxB;;AAEA,WAAO,UAACa,SAAD,EAAYC,SAAZ,EAA0B;AAC7B,YAAIoE,cAAc,IAAlB;AACAD,wBAAgB/qB,OAAhB,CAAwB,UAACqM,SAAD,EAAe;AACnC,gBAAIsa,UAAUta,SAAV,EAAqB8Q,aAArB,KACAyJ,UAAUva,SAAV,EAAqB8Q,aADrB,IACsC6N,WAD1C,EACuD;AACnDA,8BAAc,IAAd;AACH,aAHD,MAGO;AACHA,8BAAc,KAAd;AACH;AACJ,SAPD;AAQA,eAAOA,WAAP;AACH,KAXD;AAYH,C;;;;;;;;;;;;AC9BD;AAAA;AAAA;AAAA;AAAA;AACA;;AAE
O,SAASpX,WAAT,CAAsBqX,UAAtB,EAAkCC,UAAlC,EAA8C;AACjD,WAAOxX,mEAAYA,CAACuX,UAAb,EAAyBC,UAAzB,EAAqCJ,uFAAiBA,CAACG,UAAlB,EAA8BC,UAA9B,CAArC,EAAgF,IAAhF,CAAP;AACH,C;;;;;;;;;;;;ACLD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;AAGO,SAASrX,aAAT,CAAwBoX,UAAxB,EAAoCC,UAApC,EAAgDnJ,QAAhD,EAA0D;AAC7D,WAAOrO,mEAAYA,CAACuX,UAAb,EAAyBC,UAAzB,EAAqCnJ,QAArC,EAA+C,KAA/C,EAAsDle,gDAAKA,CAACE,SAA5D,CAAP;AACH;;AAEM,SAAS+P,cAAT,CAAyBmX,UAAzB,EAAqCC,UAArC,EAAiDnJ,QAAjD,EAA2D;AAC9D,WAAOrO,mEAAYA,CAACwX,UAAb,EAAyBD,UAAzB,EAAqClJ,QAArC,EAA+C,KAA/C,EAAsDle,gDAAKA,CAACG,UAA5D,CAAP;AACH;;AAEM,SAAS+P,aAAT,CAAwBkX,UAAxB,EAAoCC,UAApC,EAAgDnJ,QAAhD,EAA0D;AAC7D,WAAO/N,oDAAKA,CAACH,cAAcoX,UAAd,EAA0BC,UAA1B,EAAsCnJ,QAAtC,CAAN,EAAuDjO,eAAemX,UAAf,EAA2BC,UAA3B,EAAuCnJ,QAAvC,CAAvD,CAAP;AACH,C;;;;;;;;;;;;ACfD;AAAA;AAAA;AAAA;;;;;;;AAOO,IAAMtO,oBAAoB,SAApBA,iBAAoB;AAAA,oCAAItL,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAM+c,GAAGzR,iBAAH,WAAwBtL,IAAxB,CAAN;AAAA,GAAb;AAAA,CAA1B;;AAEP;;;;;;;AAOO,IAAMO,OAAO,SAAPA,IAAO;AAAA,qCAAIP,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAM+c,GAAGxc,IAAH,WAAWP,IAAX,CAAN;AAAA,GAAb;AAAA,CAAb,C;;;;;;;;;;;;AChBP;AAAA;AAAA;;;;;;;AAOO,SAASoG,kBAAT,CAA6BrE,UAA7B,EAAyC6F,QAAzC,EAAmD;AACtD,QAAI7F,WAAW5J,MAAX,GAAoB,CAAxB,EAA2B;AACvB,YAAM6qB,aAAajhB,WAAWG,KAAX,CAAiB,GAAjB,CAAnB;AACA8gB,mBAAWnrB,OAAX,CAAmB,UAACorB,OAAD,EAAa;AAC5B,gBAAMC,aAAaD,QAAQ/gB,KAAR,CAAc,GAAd,CAAnB;AACA,gBAAME,QAAQ,CAAE8gB,WAAW,CAAX,CAAhB;AACA,gBAAM7gB,MAAM,EAAE6gB,WAAW,CAAX,KAAiBA,WAAW,CAAX,CAAnB,CAAZ;AACA,gBAAI7gB,OAAOD,KAAX,EAAkB;AACd,qBAAK,IAAInL,IAAImL,KAAb,EAAoBnL,KAAKoL,GAAzB,EAA8BpL,KAAK,CAAnC,EAAsC;AAClC2Q,6BAAS3Q,CAAT;AACH;AACJ;AACJ,SATD;AAUH;AACJ,C;;;;;;;;;;;;;;;;;;;;;;ACrBD;AACA;AACA;AACA;;AAEA;;;;;;;;AAQA,SAASksB,SAAT,CAAoBC,QAApB,EAA8BC,QAA9B,EAAwC;AACpC,QAAIC,gBAAJ;;AAEA,YAAQF,QAAR;AACA,aAAKrY,qDAAcA,CAACC,UAApB;AACA,aAAKtC,uDAAgBA,CAACwB,QAAtB;AACI,gBAAImZ,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ;AAAA,2BAAUD,IAAIC,CAAd;AAAA,iBAAV;AACH,aAFD,MAEO;AACHuS,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ;AAAA,2BAAU
A,IAAID,CAAd;AAAA,iBAAV;AACH;AACD;AACJ;AACI,gBAAIuS,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AACH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAR,GAAY,CAAC,CAApB;AACH,iBAPD;AAQH,aATD,MASO;AACHuS,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AACH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAC,CAAT,GAAa,CAApB;AACH,iBAPD;AAQH;AA5BL;;AA+BA,WAAOuS,OAAP;AACH;;AAED;;;;;;;AAOA,SAASC,mBAAT,CAA8BC,QAA9B,EAAwCC,YAAxC,EAAsD;AAClD,QAAMC,YAAY/T,OAAO8T,YAAP,EAAqBE,WAArB,OAAuC,MAAvC,GAAgD,MAAhD,GAAyD,KAA3E;AACA,WAAOR,UAAUK,SAAS1mB,IAAnB,EAAyB4mB,SAAzB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,SAAT,CAAoBrmB,IAApB,EAA0BoH,UAA1B,EAAsC;AAClC,QAAMmd,UAAU,IAAIzlB,GAAJ,EAAhB;AACA,QAAMwnB,cAAc,EAApB;;AAEAtmB,SAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAW;AACpB,YAAMkiB,WAAWliB,MAAM+C,UAAN,CAAjB;AACA,YAAImd,QAAQ7kB,GAAR,CAAY6mB,QAAZ,CAAJ,EAA2B;AACvBD,wBAAY/B,QAAQ5kB,GAAR,CAAY4mB,QAAZ,CAAZ,EAAmC,CAAnC,EAAsC/rB,IAAtC,CAA2C6J,KAA3C;AACH,SAFD,MAEO;AACHiiB,wBAAY9rB,IAAZ,CAAiB,CAAC+rB,QAAD,EAAW,CAACliB,KAAD,CAAX,CAAjB;AACAkgB,oBAAQllB,GAAR,CAAYknB,QAAZ,EAAsBD,YAAY1rB,MAAZ,GAAqB,CAA3C;AACH;AACJ,KARD;;AAUA,WAAO0rB,WAAP;AACH;;AAED;;;;;;;;;AASA,SAASE,kBAAT,CAA6BC,YAA7B,EAA2CC,YAA3C,EAAyDC,kBAAzD,EAA6E;AACzE,QAAMC,MAAM;AACRC,eAAOJ,aAAa,CAAb;AADC,KAAZ;;AAIAC,iBAAajlB,MAAb,CAAoB,UAACC,GAAD,EAAMqC,IAAN,EAAYC,GAAZ,EAAoB;AACpCtC,YAAIqC,IAAJ,IAAY0iB,aAAa,CAAb,EAAgBjtB,GAAhB,CAAoB;AAAA,mBAAS6K,MAAMsiB,mBAAmB3iB,GAAnB,EAAwBqE,KAA9B,CAAT;AAAA,SAApB,CAAZ;AACA,eAAO3G,GAAP;AACH,KAHD,EAGGklB,GAHH;;AAKA,WAAOA,GAAP;AACH;;AAED;;;;;;;AAOA,SAASE,iBAAT,CAA4B9mB,IAA5B,EAAkCC,MAAlC,EAA0C4F,cAA1C,EAA0D;AACtD,QAAIc,kBAAJ;AACA,QAAIogB,iBAAJ;AACA,QAAId,iBAAJ;AACA,QAAIvsB,IAAImM,eAAejL,MAAf,GAAwB,CAAhC;;AAEA,WAAOlB,KAAK,CAAZ,EAAeA,GAAf,EAAoB;AAChBiN,oBAAYd,eAAenM,CAAf,EAAkB,CAAlB,CAAZ;AACAqtB,mBAAWlhB,eAAenM,CAAf,EAAkB,CAAlB,CAAX;AACAusB,mBAAWpL,6DAAaA,CAAC5a,MAAd,EAAsB0G,SAAtB,CAAX;;AAEA,YAAI,CAACsf,QAAL,EAAe;A
ACX;AACA;AACH;;AAED,YAAIe,yDAAUA,CAACD,QAAX,CAAJ,EAA0B;AACtB;AACA5B,yEAASA,CAACnlB,IAAV,EAAgB,UAACuT,CAAD,EAAIC,CAAJ;AAAA,uBAAUuT,SAASxT,EAAE0S,SAAS5d,KAAX,CAAT,EAA4BmL,EAAEyS,SAAS5d,KAAX,CAA5B,CAAV;AAAA,aAAhB;AACH,SAHD,MAGO,IAAItH,sDAAOA,CAACgmB,QAAR,CAAJ,EAAuB;AAAA;AAC1B,oBAAMT,cAAcD,UAAUrmB,IAAV,EAAgBimB,SAAS5d,KAAzB,CAApB;AACA,oBAAM4e,YAAYF,SAASA,SAASnsB,MAAT,GAAkB,CAA3B,CAAlB;AACA,oBAAM8rB,eAAeK,SAASlqB,KAAT,CAAe,CAAf,EAAkBkqB,SAASnsB,MAAT,GAAkB,CAApC,CAArB;AACA,oBAAM+rB,qBAAqBD,aAAaltB,GAAb,CAAiB;AAAA,2BAAKqhB,6DAAaA,CAAC5a,MAAd,EAAsBlG,CAAtB,CAAL;AAAA,iBAAjB,CAA3B;;AAEAusB,4BAAYhsB,OAAZ,CAAoB,UAACmsB,YAAD,EAAkB;AAClCA,iCAAajsB,IAAb,CAAkBgsB,mBAAmBC,YAAnB,EAAiCC,YAAjC,EAA+CC,kBAA/C,CAAlB;AACH,iBAFD;;AAIAxB,6EAASA,CAACmB,WAAV,EAAuB,UAAC/S,CAAD,EAAIC,CAAJ,EAAU;AAC7B,wBAAM0T,IAAI3T,EAAE,CAAF,CAAV;AACA,wBAAMjX,IAAIkX,EAAE,CAAF,CAAV;AACA,2BAAOyT,UAAUC,CAAV,EAAa5qB,CAAb,CAAP;AACH,iBAJD;;AAMA;AACA0D,qBAAKpF,MAAL,GAAc,CAAd;AACA0rB,4BAAYhsB,OAAZ,CAAoB,UAAC+J,KAAD,EAAW;AAC3BrE,yBAAKxF,IAAL,gCAAa6J,MAAM,CAAN,CAAb;AACH,iBAFD;AAlB0B;AAqB7B,SArBM,MAqBA;AAAA;AACH,oBAAM2gB,SAASgB,oBAAoBC,QAApB,EAA8Bc,QAA9B,CAAf;AACA;AACA5B,6EAASA,CAACnlB,IAAV,EAAgB,UAACuT,CAAD,EAAIC,CAAJ;AAAA,2BAAUwR,OAAOzR,EAAE0S,SAAS5d,KAAX,CAAP,EAA0BmL,EAAEyS,SAAS5d,KAAX,CAA1B,CAAV;AAAA,iBAAhB;AAHG;AAIN;AACJ;AACJ;;AAED;;;;;;;;;AASA,IAAM8e,sBAAsB,SAAtBA,mBAAsB,CAACC,UAAD,EAAapnB,IAAb,EAAmBC,MAAnB,EAA2B4F,cAA3B,EAA8C;AACtE,QAAIuhB,WAAWxsB,MAAX,KAAsB,CAA1B,EAA6B;AAAE,eAAOoF,IAAP;AAAc;;AAE7C,QAAMqnB,YAAYD,WAAW,CAAX,CAAlB;AACA,QAAM5tB,MAAM,IAAIsF,GAAJ,EAAZ;;AAEAkB,SAAKyB,MAAL,CAAY,UAACC,GAAD,EAAM4lB,OAAN,EAAkB;AAC1B,YAAMC,OAAOD,QAAQD,UAAUhf,KAAlB,CAAb;AACA,YAAI3G,IAAIhC,GAAJ,CAAQ6nB,IAAR,CAAJ,EAAmB;AACf7lB,gBAAI/B,GAAJ,CAAQ4nB,IAAR,EAAc/sB,IAAd,CAAmB8sB,OAAnB;AACH,SAFD,MAEO;AACH5lB,gBAAIrC,GAAJ,CAAQkoB,IAAR,EAAc,CAACD,OAAD,CAAd;AACH;AACD,eAAO5lB,GAAP;AACH,KARD,EAQGlI,GARH;;AANsE;AAAA;AAAA;;AAAA;AAgBtE,6BAAuBA,GAAvB,8HAA4B;AAAA;;AAAA;;AAAA,gBAAlB5B,GAAkB;AAAA,gBAAbsa,GAAa;;AACxB,gBAAMsV,OAAOL,oBAAoBC,WAAWvqB,KAA
X,CAAiB,CAAjB,CAApB,EAAyCqV,GAAzC,EAA8CjS,MAA9C,EAAsD4F,cAAtD,CAAb;AACArM,gBAAI6F,GAAJ,CAAQzH,GAAR,EAAa4vB,IAAb;AACA,gBAAI3sB,MAAMkG,OAAN,CAAcymB,IAAd,CAAJ,EAAyB;AACrBV,kCAAkBU,IAAlB,EAAwBvnB,MAAxB,EAAgC4F,cAAhC;AACH;AACJ;AAtBqE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAwBtE,WAAOrM,GAAP;AACH,CAzBD;;AA2BA;;;;;;;;;AASA,SAASiuB,cAAT,CAAyBznB,IAAzB,EAA+BC,MAA/B,EAAuC4F,cAAvC,EAAuDuhB,UAAvD,EAAmE;AAC/DvhB,qBAAiBA,eAAeyN,MAAf,CAAsB,UAACoU,MAAD,EAAY;AAC/C,YAAIA,OAAO,CAAP,MAAc,IAAlB,EAAwB;AACpBN,uBAAW5sB,IAAX,CAAgBktB,OAAO,CAAP,CAAhB;AACA,mBAAO,KAAP;AACH;AACD,eAAO,IAAP;AACH,KANgB,CAAjB;AAOA,QAAI7hB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE,eAAOoF,IAAP;AAAc;;AAEjDonB,iBAAaA,WAAW5tB,GAAX,CAAe;AAAA,eAAKqhB,6DAAaA,CAAC5a,MAAd,EAAsBrD,CAAtB,CAAL;AAAA,KAAf,CAAb;;AAEA,QAAM+qB,iBAAiBR,oBAAoBC,UAApB,EAAgCpnB,IAAhC,EAAsCC,MAAtC,EAA8C4F,cAA9C,CAAvB;AACA,WAAO7F,KAAKxG,GAAL,CAAS,UAACQ,GAAD,EAAS;AACrB,YAAIN,IAAI,CAAR;AACA,YAAIkuB,UAAUD,cAAd;;AAEA,eAAO,CAAC9sB,MAAMkG,OAAN,CAAc6mB,OAAd,CAAR,EAAgC;AAC5BA,sBAAUA,QAAQjoB,GAAR,CAAY3F,IAAIotB,WAAW1tB,GAAX,EAAgB2O,KAApB,CAAZ,CAAV;AACH;;AAED,eAAOuf,QAAQC,KAAR,EAAP;AACH,KATM,CAAP;AAUH;;AAED;;;;;;AAMO,SAASjG,QAAT,CAAmBkG,OAAnB,EAA4BjiB,cAA5B,EAA4C;AAAA,QACzC5F,MADyC,GACxB6nB,OADwB,CACzC7nB,MADyC;AAAA,QACjCD,IADiC,GACxB8nB,OADwB,CACjC9nB,IADiC;;;AAG/C6F,qBAAiBA,eAAeyN,MAAf,CAAsB;AAAA,eAAW,CAAC,CAACuH,6DAAaA,CAAC5a,MAAd,EAAsB8nB,QAAQ,CAAR,CAAtB,CAAb;AAAA,KAAtB,CAAjB;AACA,QAAIliB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE;AAAS;;AAE5C,QAAIotB,kBAAkBniB,eAAewB,SAAf,CAAyB;AAAA,eAAW0gB,QAAQ,CAAR,MAAe,IAA1B;AAAA,KAAzB,CAAtB;AACAC,sBAAkBA,oBAAoB,CAAC,CAArB,GAAyBA,eAAzB,GAA2CniB,eAAejL,MAA5E;;AAEA,QAAMqtB,yBAAyBpiB,eAAehJ,KAAf,CAAqB,CAArB,EAAwBmrB,eAAxB,CAA/B;AACA,QAAME,sBAAsBriB,eAAehJ,KAAf,CAAqBmrB,eAArB,CAA5B;;AAEAlB,sBAAkB9mB,IAAlB,EAAwBC,MAAxB,EAAgCgoB,sBAAhC;AACAjoB,WAAOynB,eAAeznB,IAAf,EAAqBC,MAArB,EAA6BioB,mBAA7B,EAAkDD,uBAAuBzuB,GAAvB,CAA2B;AAAA,eAAUkuB,OAAO,CAAP,CAAV;AAAA,KAA3B,CAAlD,CAAP;;AAEAI,YAAQpkB,IAAR,GAAe1D,KAAKxG,GAAL,CAAS;AAAA,
eAAOQ,IAAImuB,GAAJ,EAAP;AAAA,KAAT,CAAf;AACAL,YAAQ9nB,IAAR,GAAeA,IAAf;AACH,C;;;;;;;;;;;;AChQD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;;;;;;;AAOO,SAASsO,KAAT,CAAgBwR,GAAhB,EAAqBC,GAArB,EAA0B;AAC7B,QAAM+B,YAAY,EAAlB;AACA,QAAM7hB,SAAS,EAAf;AACA,QAAM8hB,gBAAgB,EAAtB;AACA,QAAM/hB,OAAO,EAAb;AACA,QAAMmgB,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAM4b,wBAAwB7B,cAAclZ,SAAd,EAA9B;AACA,QAAMgb,wBAAwB7B,cAAcnZ,SAAd,EAA9B;AACA,QAAMxN,OAAU0mB,cAAc1mB,IAAxB,eAAsC2mB,cAAc3mB,IAA1D;;AAEA;AACA,QAAI,CAACyoB,gEAAUA,CAACpC,IAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiD+c,IAAIxc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACC8c,QAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQmgB,sBAAsBrb,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAZ;AACA8hB,sBAAcvnB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;AAMA,aAAS0oB,iBAAT,CAA4B3C,EAA5B,EAAgCvY,SAAhC,EAA2C;AACvC4B,wFAAkBA,CAAC2W,GAAGnc,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAMmnB,QAAQ,EAAd;AACA,gBAAIwB,WAAW,EAAf;AACAN,0BAAcznB,OAAd,CAAsB,UAACgoB,UAAD,EAAgB;AAClC,oBAAMzqB,QAAQoP,UAAUqb,UAAV,EAAsBtb,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACA2oB,kCAAgBxqB,KAAhB;AACAgpB,sBAAMyB,UAAN,IAAoBzqB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACiqB,UAAUO,QAAV,CAAL,EAA0B;AACtBriB,qBAAKxF,IAAL,CAAUqmB,KAAV;AACAiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB;AACAG,sBAAkBpC,GAAlB,EAAuBkC,qBAAvB;;AAEA,WAAO,IAAIzf,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;AC7DD;AACA;AACA;AAOA;;AAEA;;;;;;;;;;;;IAWMgT,Q;;AAEF;;;;;;;;;;AAUA,wBAAwB;AAAA;;AACpB,YAAI2b,eAAJ;;AAEA,aAAKrM,OAAL,GAAe,IAAf;AACA,aAAK7G,WAAL,GAAmB,EAAnB;AACA,aAAKO,mBAAL,GAA2B,EAA3B;AACA,aAAKoG,SAAL,GAAiB,EAAjB;;AANoB,0CAARvW,MAAQ;AAARA,kBAAQ;AAAA;;AAQpB,YAAIA,OAAO1K,MAAP,KAAkB,CAAlB,IAAwB,CAACwtB,SAAS9iB,OAAO,CAAP,CAAV,aAAgCmH,QAA5D,EAAuE;AACnE;AA
CA,iBAAKlJ,cAAL,GAAsB6kB,OAAO7kB,cAA7B;AACA,iBAAKF,WAAL,GAAmB+kB,OAAO/kB,WAA1B;AACA,iBAAK8C,WAAL,GAAmBiiB,OAAOjiB,WAA1B;AACA,iBAAK4V,OAAL,GAAeqM,MAAf;AACA,iBAAKvhB,kBAAL,GAA0B,KAAKkV,OAAL,CAAalV,kBAAvC;AACA,iBAAKwhB,eAAL,GAAuB9Y,0DAAWA,EAAlC;AACA,iBAAK7H,qBAAL,GAA6BC,qBAA7B;AACH,SATD,MASO;AACH2S,8DAAUA,mBAAC,IAAX,SAAoBhV,MAApB;AACA,iBAAK+iB,eAAL,GAAuB,KAAKxhB,kBAAL,CAAwBpN,IAA/C;AACA,iBAAKiO,qBAAL,GAA6BC,qBAA7B;AACA,iBAAKgC,qBAAL,GAA6B;AACzB+S,gCAAgB,EADS;AAEzBY,kCAAkB;AAFO,aAA7B;AAIH;AACJ;;AAED;;;;;;;;;;;;;;;;;;;;;;;;oCAoBa;AACT,mBAAO,KAAKlX,aAAL,GAAqBxE,MAArB,CAA4BpI,GAA5B,CAAgC;AAAA,uBAAK8J,EAAErD,MAAF,EAAL;AAAA,aAAhC,CAAP;AACH;;AAED;;;;;;;;;;;kCAQU;AACN,mBAAO,KAAKooB,eAAZ;AACH;;;wCAEgB;AACb,mBAAO,KAAKC,WAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,WAAL,GAAmB9T,4DAAYA,CAAC,CAAC,KAAKnR,WAAN,EAAmB,KAAKE,cAAxB,CAAb,EACd,KAAKN,oBAAL,EADc,EACe,KAAKolB,eADpB,CAAnB;AAEA,mBAAO,IAAP;AACH;;;+CAEuB;AACpB,mBAAO,KAAKxhB,kBAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BA8BM0hB,Q,EAAUlM,Q,EAAU;AACtB,mBAAOrO,8DAAYA,CAAC,IAAb,EAAmBua,QAAnB,EAA6BlM,QAA7B,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;oCAoBakM,Q,EAAU;AACnB,mBAAOva,8DAAYA,CAAC,IAAb,EAAmBua,QAAnB,EAA6BnD,mEAAiBA,CAAC,IAAlB,EAAwBmD,QAAxB,CAA7B,EAAgE,IAAhE,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;8BAkBOC,S,EAAW;AACd,mBAAOla,wDAAM,IAAN,EAAYka,SAAZ,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;mCAiBYC,c,EAAgB;AACxB,mBAAOxa,6DAAW,IAAX,EAAiBwa,cAAjB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;+BA+CQxR,Q,EAAU9R,M,EAAQ;AACtB,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACAA,mBAAOuG,IAAP,GAAcvG,OAAOuG,IAAP,IAAeD,UAAUC,IAAvC;;AAEA,gBAAM4N,cAAc,EAAElU,WAAWD,OAAOC,SAApB,EAApB;AACA,mBAAOiU,+DAAeA,CAClB,IADG,EAEHpC,QAFG,EAGH9R,MAHG,EAIHmU,WAJG,CAAP;AAMH;;AAED;;;;;;;;;;;;;;;;;;;;;kCAkBW;AACP,mBAAO,CAAC,KAAKjW,WAAL,CAAiBzI,MAAlB,IAA4B,CAAC,KAAK2I,cAAL,CAAoB3I,MAAxD;AACH;;AAED;;;;;;;;;;gCAOyB;AAAA,gBAAlBwK,SAAkB,uEAAN,IAAM;;AACrB,gBAAM4R,WAAW,IAAI,KAAK9Q,WAA
T,CAAqB,IAArB,CAAjB;AACA,gBAAId,SAAJ,EAAe;AACX4R,yBAASpR,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHoR,yBAASpR,SAAT,CAAmB,IAAnB;AACH;AACD,mBAAOoR,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCA2CS0C,S,EAAWvU,M,EAAQ;AACxB,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACA,gBAAM6G,cAAc,KAAKhE,eAAL,EAApB;AACA,gBAAMiE,YAAY7R,OAAOmI,IAAP,CAAYyJ,WAAZ,CAAlB;AAPwB,0BAQP7G,MARO;AAAA,gBAQhBuG,IARgB,WAQhBA,IARgB;;AASxB,gBAAMgS,sBAAsBtR,sEAAsBA,CAACsN,SAAvB,EAAkCzN,SAAlC,EAA6CD,WAA7C,CAA5B;;AAEA,gBAAIgP,kBAAJ;;AAEA,gBAAItP,SAASC,oDAAaA,CAACmB,GAA3B,EAAgC;AAC5B,oBAAI4b,kBAAkBjP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4C;AAC9DhS,0BAAMC,oDAAaA,CAACC,MAD0C;AAE9DxG,+BAAWD,OAAOC;AAF4C,iBAA5C,EAGnB6G,SAHmB,CAAtB;AAIA,oBAAI0c,iBAAiBlP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4C;AAC7DhS,0BAAMC,oDAAaA,CAACkB,OADyC;AAE7DzH,+BAAWD,OAAOC;AAF2C,iBAA5C,EAGlB6G,SAHkB,CAArB;AAIA+O,4BAAY,CAAC0N,eAAD,EAAkBC,cAAlB,CAAZ;AACH,aAVD,MAUO;AACH,oBAAID,mBAAkBjP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4CvY,MAA5C,EAAoD8G,SAApD,CAAtB;AACA+O,4BAAY0N,gBAAZ;AACH;;AAED,mBAAO1N,SAAP;AACH;;;0CAEkB;AACf,mBAAO,KAAK4N,YAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,YAAL,GAAoB,KAAKN,WAAL,CAAiB1mB,MAAjB,CAAwBH,MAAxB,CAA+B,UAACC,GAAD,EAAMmnB,QAAN,EAAgBnvB,CAAhB,EAAsB;AACrEgI,oBAAImnB,SAASpvB,IAAT,EAAJ,IAAuB;AACnB4O,2BAAO3O,CADY;AAEnBmf,yBAAKgQ,SAAS5oB,MAAT;AAFc,iBAAvB;AAIA,uBAAOyB,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;AAOA,mBAAO,IAAP;AACH;;AAGD;;;;;;;;;kCAMW;AACP,iBAAKqa,OAAL,IAAgB,KAAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAe,IAAf;AACA,iBAAKF,SAAL,CAAevhB,OAAf,CAAuB,UAACwhB,KAAD,EAAW;AAC9BA,sBAAMC,OAAN,GAAgB,IAAhB;AACH,aAFD;AAGA,iBAAKF,SAAL,GAAiB,EAAjB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;oCA0BaC,K,EAAO;AAChB,gBAAI9X,MAAM,KAAK6X,SAAL,CAAexU,SAAf,CAAyB;AAAA,uBAAW0hB,YAAYjN,KAAvB;AAAA,aAAzB,CAAV;AACA9X,oBAAQ,CAAC,CAAT,GAAa,KAAK6X,SAAL,CAAeta,MAAf,CAAsByC,GAAtB,EAA2B,CAA3B,CAAb,GAA6C,IAA7C;AACH;;AAED;;;;;;;;kCAKWglB,M,EAAQ;AACf,iBA
AKjN,OAAL,IAAgB,KAAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAeiN,MAAf;AACAA,sBAAUA,OAAOnN,SAAP,CAAiBrhB,IAAjB,CAAsB,IAAtB,CAAV;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;oCAwBa;AACT,mBAAO,KAAKuhB,OAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;sCAyBe;AACX,mBAAO,KAAKF,SAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;yCAwBkB;AACd,mBAAO,KAAK3G,WAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;iDAwB0B;AACtB,mBAAO,KAAKO,mBAAZ;AACH;;;;;;AAGUhJ,uEAAf,E;;;;;;;;;;;;ACjkBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;IAEemW,G,GAAoDc,kE,CAApDd,G;IAAKI,G,GAA+CU,kE,CAA/CV,G;IAAKzS,G,GAA0CmT,kE,CAA1CnT,G;IAAKE,G,GAAqCiT,kE,CAArCjT,G;IAAKyS,K,GAAgCQ,kE,CAAhCR,K;IAAOC,I,GAAyBO,kE,CAAzBP,I;IAAMC,K,GAAmBM,kE,CAAnBN,K;IAAY6F,E,GAAOvF,kE,CAAZF,G;;;;;;;;;;;;;ACFvD;AAAA;;;;;;AAMe,yEAAC3kB,KAAD,EAAW;AACtB,QAAInF,IAAI,CAAR;AACA,WAAO,YAAe;AAAA,0CAAXkI,MAAW;AAAXA,kBAAW;AAAA;;AAClBA,eAAOtH,OAAP,CAAe,UAAC4X,GAAD,EAAM9K,UAAN,EAAqB;AAChC,gBAAI,EAAEvI,MAAMuI,UAAN,aAA6BvM,KAA/B,CAAJ,EAA2C;AACvCgE,sBAAMuI,UAAN,IAAoBvM,MAAM+iB,IAAN,CAAW,EAAEhjB,QAAQlB,CAAV,EAAX,CAApB;AACH;AACDmF,kBAAMuI,UAAN,EAAkB5M,IAAlB,CAAuB0X,GAAvB;AACH,SALD;AAMAxY;AACH,KARD;AASH,CAXD,E;;;;;;;;;;;;;;;;ACNA;;;;;;AAMA,SAASwvB,mBAAT,CAA8BjuB,IAA9B,EAAoC;AAChC,QAAIA,gBAAgB9C,IAApB,EAA0B;AACtB,eAAO8C,IAAP;AACH;;AAED,WAAO,IAAI9C,IAAJ,CAAS8C,IAAT,CAAP;AACH;AACD;;;;;;;AAOA,SAASR,GAAT,CAAc6B,CAAd,EAAiB;AACb,WAAQA,IAAI,EAAL,SAAgBA,CAAhB,GAAuBA,CAA9B;AACH;AACD;;;;;;;;;AASA;;;;;;;AAOAP,OAAOotB,MAAP,GAAgB,UAAUjtB,IAAV,EAAgB;AAC5B,WAAOA,KAAKY,OAAL,CAAa,0BAAb,EAAyC,MAAzC,CAAP;AACH,CAFD;;AAIA;;;;;;;;AAQA,0BAA2B,SAAS4R,iBAAT,CAA4B/V,MAA5B,EAAoC;AAC3D,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKywB,QAAL,GAAgB7kB,SAAhB;AACA,SAAKyO,UAAL,GAAkBzO,SAAlB;AACH;;AAED;AACAmK,kBAAkB2a,YAAlB,GAAiC,GAAjC;;AAEA;AACA;AACA3a,kBAAkB4a,uBAAlB,GAA4C;AACxCC,UAAM,CADkC;AAExCC,WAAO,CAFiC;AAGxCC,SAAK,CAHmC;AAIxCC,UAAM,CAJkC;AAKxCC,YAAQ,CALgC;AAMxCC,YAAQ,CANgC;AAOxCC,iBAAa;AAP2B,CAA5C;;AAUA;;;;;;;AAOAnb,kBAAkBob,mBAAlB,GAAwC,UAAUC,MAAV,EAAkB;AACtD,WAAO,UAAU7X,GAAV,EAAe;AAClB,YAAIW,kBA
AJ;AACA,YAAIkB,SAASlB,YAAYmX,SAAS9X,GAAT,EAAc,EAAd,CAArB,CAAJ,EAA6C;AACzC,mBAAOW,SAAP;AACH;;AAED,eAAOkX,MAAP;AACH,KAPD;AAQH,CATD;;AAWA;;;;;;;;AAQArb,kBAAkBub,kBAAlB,GAAuC,UAAU3R,KAAV,EAAiByR,MAAjB,EAAyB;AAC5D,WAAO,UAAC7X,GAAD,EAAS;AACZ,YAAIxY,UAAJ;AACA,YAAIwwB,UAAJ;;AAEA,YAAI,CAAChY,GAAL,EAAU;AAAE,mBAAO6X,MAAP;AAAgB;;AAE5B,YAAMI,OAAOjY,IAAIkU,WAAJ,EAAb;;AAEA,aAAK1sB,IAAI,CAAJ,EAAOwwB,IAAI5R,MAAM1d,MAAtB,EAA8BlB,IAAIwwB,CAAlC,EAAqCxwB,GAArC,EAA0C;AACtC,gBAAI4e,MAAM5e,CAAN,EAAS0sB,WAAT,OAA2B+D,IAA/B,EAAqC;AACjC,uBAAOzwB,CAAP;AACH;AACJ;;AAED,YAAIA,MAAM6K,SAAV,EAAqB;AACjB,mBAAOwlB,MAAP;AACH;AACD,eAAO,IAAP;AACH,KAlBD;AAmBH,CApBD;;AAsBA;;;;;;;;;;;;;;;;;AAiBArb,kBAAkB0b,mBAAlB,GAAwC,YAAY;AAChD,QAAMC,UAAU;AACZC,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,CADK;AAUZC,cAAM,CACF,QADE,EAEF,QAFE,EAGF,SAHE,EAIF,WAJE,EAKF,UALE,EAMF,QANE,EAOF,UAPE;AAVM,KAAhB;AAoBA,QAAMC,YAAY;AACdF,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,EAQH,KARG,EASH,KATG,EAUH,KAVG,EAWH,KAXG,EAYH,KAZG,CADO;AAedC,cAAM,CACF,SADE,EAEF,UAFE,EAGF,OAHE,EAIF,OAJE,EAKF,KALE,EAMF,MANE,EAOF,MAPE,EAQF,QARE,EASF,WATE,EAUF,SAVE,EAWF,UAXE,EAYF,UAZE;AAfQ,KAAlB;;AA+BA,QAAME,cAAc;AAChBC,WAAG;AACC;AACAjxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;;AAEA,uBAAO5O,EAAEsnB,QAAF,GAAaC,QAAb,EAAP;AACH;AAVF,SADa;AAahBX,WAAG;AACC;AACAzwB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,KAAe,EAA7B;;AAEA,uBAAO,CAAC1vB,UAAU,CAAV,GAAc,EAAd,GAAmBA,KAApB,EAA2B2vB,QAA3B,EAAP;AACH;AAXF,SAba;AA0BhBC,WAAG;AACC;AACArxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKC/Y,oBAAQ,gBAACM,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIkU,WAAJ,EAAP;A
ACH;AACD,uBAAO,IAAP;AACH,aAVF;AAWCvjB,uBAAW,mBAACqP,GAAD,EAAS;AAChB,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,EAAd;;AAEA,uBAAQ1vB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA1Ba;AA4ChB6vB,WAAG;AACC;AACAtxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKC/Y,oBAAQ,gBAACM,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIkU,WAAJ,EAAP;AACH;AACD,uBAAO,IAAP;AACH,aAVF;AAWCvjB,uBAAW,mBAACqP,GAAD,EAAS;AAChB,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,EAAd;;AAEA,uBAAQ1vB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA5Ca;AA8DhB8vB,WAAG;AACC;AACAvxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM+Y,OAAO3nB,EAAE4nB,UAAF,EAAb;;AAEA,uBAAOzwB,IAAIwwB,IAAJ,CAAP;AACH;AAXF,SA9Da;AA2EhBE,WAAG;AACC;AACA1xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM5W,UAAUgI,EAAE8nB,UAAF,EAAhB;;AAEA,uBAAO3wB,IAAIa,OAAJ,CAAP;AACH;AAXF,SA3Ea;AAwFhB+vB,WAAG;AACC;AACA5xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMoZ,KAAKhoB,EAAEioB,eAAF,EAAX;;AAEA,uBAAOD,GAAGT,QAAH,EAAP;AACH;AAXF,SAxFa;AAqGhBtX,WAAG;AACC;AACA9Z,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQC,KAAR,CAAczwB,IAAd,CAAmB,GAAnB,CAAX;AAAwC,aAJtD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCI,QAAQC,KAA7C,CALT;AAMCznB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEmoB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQC,KAAR,CAAckB,GAAd,CAAD,CAAqBX,QAArB,EAAP;AACH;AAXF,SArGa;AAkHhBa,WAAG;AACC;AACAjyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQE,IAAR,CAAa1wB,IAAb,
CAAkB,GAAlB,CAAX;AAAuC,aAJrD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCI,QAAQE,IAA7C,CALT;AAMC1nB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEmoB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQE,IAAR,CAAaiB,GAAb,CAAD,CAAoBX,QAApB,EAAP;AACH;AAXF,SAlHa;AA+HhBjnB,WAAG;AACC;AACAnK,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEqoB,OAAF,EAAZ;;AAEA,uBAAOH,IAAIX,QAAJ,EAAP;AACH;AAXF,SA/Ha;AA4IhBvnB,WAAG;AACC;AACA7J,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEqoB,OAAF,EAAZ;;AAEA,uBAAOlxB,IAAI+wB,GAAJ,CAAP;AACH;AAXF,SA5Ia;AAyJhBhY,WAAG;AACC;AACA/Z,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUF,KAAV,CAAgBzwB,IAAhB,CAAqB,GAArB,CAAX;AAA0C,aAJxD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCO,UAAUF,KAA/C,CALT;AAMCznB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUF,KAAV,CAAgBsB,KAAhB,CAAD,CAAyBf,QAAzB,EAAP;AACH;AAXF,SAzJa;AAsKhBiB,WAAG;AACC;AACAryB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUD,IAAV,CAAe1wB,IAAf,CAAoB,GAApB,CAAX;AAAyC,aAJvD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCO,UAAUD,IAA/C,CALT;AAMC1nB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUD,IAAV,CAAeqB,KAAf,CAAD,CAAwBf,QAAxB,EAAP;AACH;AAXF,SAtKa;AAmLhB3D,WAAG;AACC;AACAztB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;AAKC/Y,kBALD,kBAKSM,GALT,EAKc;AAAE,uBAAOxD,kBAAkBob,mBAAlB,GAAwC5X,GAAxC,IAA+C,CAAtD;AAA0D,aAL1E;AAMCrP,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAOpxB,IAAImxB,QAAQ,CAAZ,CAA
P;AACH;AAXF,SAnLa;AAgMhBG,WAAG;AACC;AACAtyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;AAKC/Y,kBALD,kBAKSM,GALT,EAKc;AACT,oBAAIG,eAAJ;AACA,oBAAIH,GAAJ,EAAS;AACL,wBAAMgY,IAAIhY,IAAItX,MAAd;AACAsX,0BAAMA,IAAI8Z,SAAJ,CAAc9B,IAAI,CAAlB,EAAqBA,CAArB,CAAN;AACH;AACD,oBAAIrX,YAAYnE,kBAAkBob,mBAAlB,GAAwC5X,GAAxC,CAAhB;AACA,oBAAI+Z,cAAc,IAAI9zB,IAAJ,EAAlB;AACA,oBAAI+zB,cAAcpY,KAAKqY,KAAL,CAAYF,YAAYG,WAAZ,EAAD,GAA8B,GAAzC,CAAlB;;AAEA/Z,8BAAY6Z,WAAZ,GAA0BrZ,SAA1B;;AAEA,oBAAIqW,oBAAoB7W,MAApB,EAA4B+Z,WAA5B,KAA4CH,YAAYG,WAAZ,EAAhD,EAA2E;AACvE/Z,mCAAY6Z,cAAc,CAA1B,IAA8BrZ,SAA9B;AACH;AACD,uBAAOqW,oBAAoB7W,MAApB,EAA4B+Z,WAA5B,EAAP;AACH,aArBF;AAsBCvpB,qBAtBD,qBAsBYqP,GAtBZ,EAsBiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAInX,OAAOuI,EAAE8oB,WAAF,GAAgBvB,QAAhB,EAAX;AACA,oBAAIX,UAAJ;;AAEA,oBAAInvB,IAAJ,EAAU;AACNmvB,wBAAInvB,KAAKH,MAAT;AACAG,2BAAOA,KAAKixB,SAAL,CAAe9B,IAAI,CAAnB,EAAsBA,CAAtB,CAAP;AACH;;AAED,uBAAOnvB,IAAP;AACH;AAjCF,SAhMa;AAmOhBsxB,WAAG;AACC;AACA5yB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMnX,OAAOuI,EAAE8oB,WAAF,GAAgBvB,QAAhB,EAAb;;AAEA,uBAAO9vB,IAAP;AACH;AAXF;AAnOa,KAApB;;AAkPA,WAAO0vB,WAAP;AACH,CAvSD;;AAySA;;;;;;AAMA/b,kBAAkB4d,mBAAlB,GAAwC,YAAY;AAChD,QAAM7B,cAAc/b,kBAAkB0b,mBAAlB,EAApB;;AAEA,WAAO;AACHV,cAAMe,YAAYC,CADf;AAEH6B,iBAAS9B,YAAYP,CAFlB;AAGHsC,wBAAgB/B,YAAYK,CAHzB;AAIH2B,wBAAgBhC,YAAYM,CAJzB;AAKHpB,gBAAQc,YAAYO,CALjB;AAMHpB,gBAAQa,YAAYU,CANjB;AAOHuB,mBAAWjC,YAAYlX,CAPpB;AAQHoZ,kBAAUlC,YAAYiB,CARnB;AASHkB,sBAAcnC,YAAY7mB,CATvB;AAUHipB,qCAA6BpC,YAAYnnB,CAVtC;AAWHwpB,qBAAarC,YAAYjX,CAXtB;AAYHuZ,oBAAYtC,YAAYqB,CAZrB;AAaHkB,uBAAevC,YAAYvD,CAbxB;AAcH+F,oBAAYxC,YAAYsB,CAdrB;AAeHmB,mBAAWzC,YAAY4B;AAfpB,KAAP;AAiBH,CApBD;;AAsBA;;;;;;;AAOA3d,kBAAkBye,aAAlB,GAAkC,YAAY;AAC1C,QAAM1C,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAMgD,kBAAkB,SAAlBA,eAAkB,GAAa;AAAE;AACnC,YAAI1zB,IAAI,CAAR;AA
CA,YAAIktB,YAAJ;AACA,YAAIyG,oBAAJ;AACA,YAAMnD,IAAI,UAAKtvB,MAAf;;AAEA,eAAOlB,IAAIwwB,CAAX,EAAcxwB,GAAd,EAAmB;AACfktB,sCAAWltB,CAAX,yBAAWA,CAAX;AACA,oCAASA,CAAT,yBAASA,CAAT,GAAa;AACT2zB,8BAAczG,GAAd;AACH;AACJ;;AAED,YAAI,CAACyG,WAAL,EAAkB;AAAE,mBAAO,IAAP;AAAc;;AAElC,eAAOA,YAAY,CAAZ,EAAezb,MAAf,CAAsByb,YAAY,CAAZ,CAAtB,CAAP;AACH,KAhBD;;AAkBA,WAAO;AACH9D,cAAM,CAACkB,YAAYsB,CAAb,EAAgBtB,YAAY4B,CAA5B,EACFe,eADE,CADH;AAIH5D,eAAO,CAACiB,YAAYjX,CAAb,EAAgBiX,YAAYqB,CAA5B,EAA+BrB,YAAYvD,CAA3C,EACHkG,eADG,CAJJ;AAOH3D,aAAK,CAACgB,YAAYlX,CAAb,EAAgBkX,YAAYiB,CAA5B,EAA+BjB,YAAY7mB,CAA3C,EAA8C6mB,YAAYnnB,CAA1D,EACD8pB,eADC,CAPF;AAUH1D,cAAM,CAACe,YAAYC,CAAb,EAAgBD,YAAYP,CAA5B,EAA+BO,YAAYK,CAA3C,EAA8CL,YAAYM,CAA1D,EACF,UAAUuC,YAAV,EAAwBC,YAAxB,EAAsCC,SAAtC,EAAiDC,SAAjD,EAA4D;AACxD,gBAAIJ,oBAAJ;AACA,gBAAIK,eAAJ;AACA,gBAAIC,aAAJ;AACA,gBAAIzb,YAAJ;;AAEA,gBAAIqb,iBAAiBG,SAAUF,aAAaC,SAAxC,CAAJ,EAAyD;AACrD,oBAAIC,OAAO,CAAP,EAAU9b,MAAV,CAAiB8b,OAAO,CAAP,CAAjB,MAAgC,IAApC,EAA0C;AACtCC,2BAAO,IAAP;AACH;;AAEDN,8BAAcE,YAAd;AACH,aAND,MAMO,IAAIA,YAAJ,EAAkB;AACrBF,8BAAcE,YAAd;AACH,aAFM,MAEA;AACHF,8BAAcC,YAAd;AACH;;AAED,gBAAI,CAACD,WAAL,EAAkB;AAAE,uBAAO,IAAP;AAAc;;AAElCnb,kBAAMmb,YAAY,CAAZ,EAAezb,MAAf,CAAsByb,YAAY,CAAZ,CAAtB,CAAN;AACA,gBAAIM,IAAJ,EAAU;AACNzb,uBAAO,EAAP;AACH;AACD,mBAAOA,GAAP;AACH,SA1BC,CAVH;AAsCHyX,gBAAQ,CAACc,YAAYO,CAAb,EACJoC,eADI,CAtCL;AAyCHxD,gBAAQ,CAACa,YAAYU,CAAb,EACJiC,eADI;AAzCL,KAAP;AA6CH,CAjED;;AAmEA;;;;;AAKA1e,kBAAkBkf,UAAlB,GAA+B,UAAUj1B,MAAV,EAAkB;AAC7C,QAAMk1B,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAMoB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAM0D,gBAAgB1zB,OAAOmI,IAAP,CAAYkoB,WAAZ,CAAtB;AACA,QAAMsD,aAAa,EAAnB;AACA,QAAIr0B,UAAJ;AACA,QAAIs0B,oBAAJ;;AAEA,WAAO,CAACt0B,IAAIf,OAAOsL,OAAP,CAAe4pB,WAAf,EAA4Bn0B,IAAI,CAAhC,CAAL,KAA4C,CAAnD,EAAsD;AAClDs0B,sBAAcr1B,OAAOe,IAAI,CAAX,CAAd;AACA,YAAIo0B,cAAc7pB,OAAd,CAAsB+pB,WAAtB,MAAuC,CAAC,CAA5C,EAA+C;AAAE;AAAW;;AAE5DD,mBAAWvzB,IAAX,CAAgB;AACZ6N,mBAAO3O,CADK;AAEZgD,mBAAOsxB;AAFK,SAAhB;AAIH;;AAED,WAAOD,UAAP;AACH,CAnBD;;AAqBA;;;;;;AAMArf,kBAAkBuF,QAA
lB,GAA6B,UAAUhZ,IAAV,EAAgBtC,MAAhB,EAAwB;AACjD,QAAMs1B,QAAQ/E,oBAAoBjuB,IAApB,CAAd;AACA,QAAM8yB,aAAarf,kBAAkBkf,UAAlB,CAA6Bj1B,MAA7B,CAAnB;AACA,QAAM8xB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAI8D,eAAe9b,OAAOzZ,MAAP,CAAnB;AACA,QAAMk1B,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAI3sB,cAAJ;AACA,QAAIyxB,qBAAJ;AACA,QAAIz0B,UAAJ;AACA,QAAIwwB,UAAJ;;AAEA,SAAKxwB,IAAI,CAAJ,EAAOwwB,IAAI6D,WAAWnzB,MAA3B,EAAmClB,IAAIwwB,CAAvC,EAA0CxwB,GAA1C,EAA+C;AAC3CgD,gBAAQqxB,WAAWr0B,CAAX,EAAcgD,KAAtB;AACAyxB,uBAAe1D,YAAY/tB,KAAZ,EAAmBmG,SAAnB,CAA6BorB,KAA7B,CAAf;AACAC,uBAAeA,aAAapxB,OAAb,CAAqB,IAAIf,MAAJ,CAAW8xB,cAAcnxB,KAAzB,EAAgC,GAAhC,CAArB,EAA2DyxB,YAA3D,CAAf;AACH;;AAED,WAAOD,YAAP;AACH,CAlBD;;AAoBA;;;;;AAKAxf,kBAAkB0f,SAAlB,CAA4B71B,KAA5B,GAAoC,UAAU81B,aAAV,EAAyBnuB,OAAzB,EAAkC;AAClE,QAAMitB,gBAAgBze,kBAAkBye,aAAlB,EAAtB;AACA,QAAM/D,WAAW,KAAKkF,iBAAL,CAAuBD,aAAvB,CAAjB;AACA,QAAME,aAAa7f,kBAAkB4a,uBAArC;AACA,QAAMkF,UAAUtuB,WAAWA,QAAQsuB,OAAnC;AACA,QAAMC,aAAa,EAAnB;AACA,QAAMhsB,OAAO,EAAb;AACA,QAAIisB,oBAAJ;AACA,QAAIC,uBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAI1c,YAAJ;AACA,QAAIxY,UAAJ;AACA,QAAIm1B,cAAJ;AACA,QAAIC,oBAAJ;AACA,QAAI5E,UAAJ;AACA,QAAI7X,SAAS,EAAb;;AAEA,SAAKqc,WAAL,IAAoBvB,aAApB,EAAmC;AAC/B,YAAI,CAAC,GAAG4B,cAAH,CAAkB3rB,IAAlB,CAAuB+pB,aAAvB,EAAsCuB,WAAtC,CAAL,EAAyD;AAAE;AAAW;;AAEtEjsB,aAAK7H,MAAL,GAAc,CAAd;AACA+zB,yBAAiBxB,cAAcuB,WAAd,CAAjB;AACAE,qBAAaD,eAAeptB,MAAf,CAAsBotB,eAAe/zB,MAAf,GAAwB,CAA9C,EAAiD,CAAjD,EAAoD,CAApD,CAAb;;AAEA,aAAKlB,IAAI,CAAJ,EAAOwwB,IAAIyE,eAAe/zB,MAA/B,EAAuClB,IAAIwwB,CAA3C,EAA8CxwB,GAA9C,EAAmD;AAC/Cm1B,oBAAQF,eAAej1B,CAAf,CAAR;AACAwY,kBAAMkX,SAASyF,MAAMp1B,IAAf,CAAN;;AAEA,gBAAIyY,QAAQ3N,SAAZ,EAAuB;AACnB9B,qBAAKjI,IAAL,CAAU,IAAV;AACH,aAFD,MAEO;AACHiI,qBAAKjI,IAAL,CAAU,CAACq0B,KAAD,EAAQ3c,GAAR,CAAV;AACH;AACJ;;AAED4c,sBAAcF,WAAWI,KAAX,CAAiB,IAAjB,EAAuBvsB,IAAvB,CAAd;;AAEA,YAAI,CAACqsB,gBAAgBvqB,SAAhB,IAA6BuqB,gBAAgB,IAA9C,KAAuD,CAACN,OAA5D,EAAqE;AACjE;AACH;;AAEDC,mBAAWF,WAAWG,WAAX,CAAX,IAAsCI,WAAtC;AACH;;AAED,QAAIL,WAAW7zB,MAAX,IAAqB,KAAKq0B,eAAL,CAAqBR,WAAW7zB,MAAhC,CAAzB,EACC;AA
CGyX,eAAOkN,OAAP,CAAekP,WAAW,CAAX,CAAf,EAA8B,CAA9B,EAAiC,CAAjC;AAAsC,KAF1C,MAGK;AACDpc,eAAOkN,OAAP,eAAkBkP,UAAlB;AACH;;AAED,WAAOpc,MAAP;AACH,CApDD;;AAsDA;;;;;AAKA3D,kBAAkB0f,SAAlB,CAA4BE,iBAA5B,GAAgD,UAAUD,aAAV,EAAyB;AACrE,QAAM11B,SAAS,KAAKA,MAApB;AACA,QAAM8xB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAMyD,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAM0E,aAAarf,kBAAkBkf,UAAlB,CAA6Bj1B,MAA7B,CAAnB;AACA,QAAMu2B,WAAW,EAAjB;;AAEA,QAAIC,4BAAJ;AACA,QAAIC,eAAJ;AACA,QAAIC,iBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAIC,oBAAJ;;AAEA,QAAIrF,UAAJ;AACA,QAAIxwB,UAAJ;;AAEA61B,kBAAcnd,OAAOzZ,MAAP,CAAd;;AAEA,QAAM62B,WAAWzB,WAAWv0B,GAAX,CAAe;AAAA,eAAO0N,IAAIxK,KAAX;AAAA,KAAf,CAAjB;AACA,QAAM+yB,mBAAmB1B,WAAWnzB,MAApC;AACA,SAAKlB,IAAI+1B,mBAAmB,CAA5B,EAA+B/1B,KAAK,CAApC,EAAuCA,GAAvC,EAA4C;AACxC21B,mBAAWtB,WAAWr0B,CAAX,EAAc2O,KAAzB;;AAEA,YAAIgnB,WAAW,CAAX,KAAiBE,YAAY30B,MAAZ,GAAqB,CAA1C,EAA6C;AACzCu0B,kCAAsBE,QAAtB;AACA;AACH;;AAED,YAAIF,wBAAwB5qB,SAA5B,EAAuC;AACnC4qB,kCAAsBI,YAAY30B,MAAlC;AACH;;AAED00B,qBAAaC,YAAYvD,SAAZ,CAAsBqD,WAAW,CAAjC,EAAoCF,mBAApC,CAAb;AACAI,sBAAcA,YAAYvD,SAAZ,CAAsB,CAAtB,EAAyBqD,WAAW,CAApC,IACVtzB,OAAOotB,MAAP,CAAcmG,UAAd,CADU,GAEVC,YAAYvD,SAAZ,CAAsBmD,mBAAtB,EAA2CI,YAAY30B,MAAvD,CAFJ;;AAIAu0B,8BAAsBE,QAAtB;AACH;;AAED,SAAK31B,IAAI,CAAT,EAAYA,IAAI+1B,gBAAhB,EAAkC/1B,GAAlC,EAAuC;AACnC01B,iBAASrB,WAAWr0B,CAAX,CAAT;AACA61B,sBAAcA,YAAYzyB,OAAZ,CAAoB+wB,cAAcuB,OAAO1yB,KAAzC,EAAgD+tB,YAAY2E,OAAO1yB,KAAnB,EAA0BiuB,OAA1B,EAAhD,CAAd;AACH;;AAED,QAAM+E,gBAAgBrB,cAAc7b,KAAd,CAAoB,IAAIzW,MAAJ,CAAWwzB,WAAX,CAApB,KAAgD,EAAtE;AACAG,kBAAc7H,KAAd;;AAEA,SAAKnuB,IAAI,CAAJ,EAAOwwB,IAAIsF,SAAS50B,MAAzB,EAAiClB,IAAIwwB,CAArC,EAAwCxwB,GAAxC,EAA6C;AACzCw1B,iBAASM,SAAS91B,CAAT,CAAT,IAAwBg2B,cAAch2B,CAAd,CAAxB;AACH;AACD,WAAOw1B,QAAP;AACH,CApDD;;AAsDA;;;;;AAKAxgB,kBAAkB0f,SAAlB,CAA4Bnb,aAA5B,GAA4C,UAAUob,aAAV,EAAyB;AACjE,QAAIpzB,OAAO,IAAX;AACA,QAAI2J,OAAOmP,QAAP,CAAgBsa,aAAhB,CAAJ,EAAoC;AAChCpzB,eAAO,IAAI9C,IAAJ,CAASk2B,aAAT,CAAP;AACH,KAFD,MAEO,IAAI,CAAC,KAAK11B,MAAN,IAAgBR,KAAKI,KAAL,CAAW81B,aAAX,CAApB,EAA+C;AAClDpzB,eAAO,IA
AI9C,IAAJ,CAASk2B,aAAT,CAAP;AACH,KAFM,MAGF;AACD,YAAMjF,WAAW,KAAKA,QAAL,GAAgB,KAAK7wB,KAAL,CAAW81B,aAAX,CAAjC;AACA,YAAIjF,SAASxuB,MAAb,EAAqB;AACjB,iBAAKoY,UAAL,sCAAsB7a,IAAtB,mCAA8BixB,QAA9B;AACAnuB,mBAAO,KAAK+X,UAAZ;AACH;AACJ;AACD,WAAO/X,IAAP;AACH,CAfD;;AAiBAyT,kBAAkB0f,SAAlB,CAA4Ba,eAA5B,GAA8C,UAAS3X,GAAT,EAAc;AACxD,WAAOA,QAAQ,CAAR,IAAa,KAAK3e,MAAL,CAAY6Z,KAAZ,CAAkB,MAAlB,EAA0B5X,MAA9C;AACH,CAFD;;AAIA;;;;;;AAMA8T,kBAAkB0f,SAAlB,CAA4Bna,QAA5B,GAAuC,UAAUtb,MAAV,EAAkB01B,aAAlB,EAAiC;AACpE,QAAIrb,mBAAJ;;AAEA,QAAIqb,aAAJ,EAAmB;AACfrb,qBAAa,KAAKA,UAAL,GAAkB,KAAKC,aAAL,CAAmBob,aAAnB,CAA/B;AACH,KAFD,MAEO,IAAI,EAAErb,aAAa,KAAKA,UAApB,CAAJ,EAAqC;AACxCA,qBAAa,KAAKC,aAAL,CAAmBob,aAAnB,CAAb;AACH;;AAED,WAAO3f,kBAAkBuF,QAAlB,CAA2BjB,UAA3B,EAAuCra,MAAvC,CAAP;AACH,CAVD;;;;;;;;;;;;;;ACluBA;AAAA;;;;;;AAMe,yEAACqH,IAAD,EAAU;AACrB,QAAIuQ,MAAM3L,OAAO4L,iBAAjB;AACA,QAAIC,MAAM7L,OAAO8L,iBAAjB;;AAEA1Q,SAAK1F,OAAL,CAAa,UAACgJ,CAAD,EAAO;AAChB,YAAIA,IAAIiN,GAAR,EAAa;AACTA,kBAAMjN,CAAN;AACH;AACD,YAAIA,IAAImN,GAAR,EAAa;AACTA,kBAAMnN,CAAN;AACH;AACJ,KAPD;;AASA,WAAO,CAACiN,GAAD,EAAME,GAAN,CAAP;AACH,CAdD,E;;;;;;;;;;;;;;;;ACNA;AACA,IAAMkf,eAAe,QAArB;AACA,IAAMC,gBAAgBx1B,OAAOg0B,SAAP,CAAiBvD,QAAvC;AACA,IAAMgF,cAAc,iBAApB;AACA,IAAMC,aAAa,gBAAnB;;AAEA,SAASC,cAAT,CAAwB7oB,GAAxB,EAA6B8oB,SAA7B,EAAwC;AACpC,QAAIt2B,IAAIs2B,UAAUp1B,MAAlB;AACA,QAAIq1B,SAAS,CAAC,CAAd;;AAEA,WAAOv2B,CAAP,EAAU;AACN,YAAIwN,QAAQ8oB,UAAUt2B,CAAV,CAAZ,EAA0B;AACtBu2B,qBAASv2B,CAAT;AACA,mBAAOu2B,MAAP;AACH;AACDv2B,aAAK,CAAL;AACH;;AAED,WAAOu2B,MAAP;AACH;;AAED,SAASrL,KAAT,CAAesL,IAAf,EAAqBC,IAArB,EAA2BC,SAA3B,EAAsCC,MAAtC,EAA8CC,MAA9C,EAAsD;AAClD,QAAIhuB,IAAJ,EACIiuB,MADJ,EAEIC,MAFJ,EAGIxuB,GAHJ,EAIIyuB,IAJJ;AAKA;AACA;AACA;;AAEA,QAAI,CAACH,MAAL,EAAa;AACTD,iBAAS,CAACH,IAAD,CAAT;AACAI,iBAAS,CAACH,IAAD,CAAT;AACH,KAHD,MAIK;AACDE,eAAO71B,IAAP,CAAY01B,IAAZ;AACAI,eAAO91B,IAAP,CAAY21B,IAAZ;AACH;;AAED,QAAIA,gBAAgBt1B,KAApB,EAA2B;AACvB,aAAKyH,OAAO,CAAZ,EAAeA,OAAO6tB,KAAKv1B,MAA3B,EAAmC0H,QAAQ,CAA3C,EAA8C;AAC1C,gBAAI;AACAiuB,yBAASL,KAAK5tB,IAAL,CAA
T;AACAkuB,yBAASL,KAAK7tB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAI,QAAO4sB,MAAP,yCAAOA,MAAP,OAAkBb,YAAtB,EAAoC;AAChC,oBAAI,EAAES,aAAaI,WAAWjsB,SAA1B,CAAJ,EAA0C;AACtC2rB,yBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ,aAJD,MAKK;AACD,oBAAID,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,6BAASL,KAAK5tB,IAAL,IAAakuB,kBAAkB31B,KAAlB,GAA0B,EAA1B,GAA+B,EAArD;AACH;AACD41B,uBAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,oBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,6BAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,iBAFD,MAGK;AACD7L,0BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ;AACJ;AACJ,KA5BD,MA6BK;AACD,aAAKhuB,IAAL,IAAa6tB,IAAb,EAAmB;AACf,gBAAI;AACAI,yBAASL,KAAK5tB,IAAL,CAAT;AACAkuB,yBAASL,KAAK7tB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAI4sB,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBb,YAAzC,EAAuD;AACnD;AACA;AACA;AACA;AACA3tB,sBAAM4tB,cAAcxsB,IAAd,CAAmBotB,MAAnB,CAAN;AACA,oBAAIxuB,QAAQ6tB,WAAZ,EAAyB;AACrB,wBAAIU,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,iCAASL,KAAK5tB,IAAL,IAAa,EAAtB;AACH;AACDmuB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXD,MAYK,IAAItuB,QAAQ8tB,UAAZ,EAAwB;AACzB,wBAAIS,WAAW,IAAX,IAAmB,EAAEA,kBAAkB11B,KAApB,CAAvB,EAAmD;AAC/C01B,iCAASL,KAAK5tB,IAAL,IAAa,EAAtB;AACH;AACDmuB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXI,MAYA;AACDJ,yBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ,aAjCD,MAkCK;AACD,oBAAIJ,aAAaI,WAAWjsB,SAA5B,EAAuC;AACnC;AACH;AACD2rB,qBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ;AACJ;AACD,WAAON,IAAP;AACH;;AAGD,SAASnW,OAAT,CAAkBmW,IAAlB,EAAwBC,IAAxB,EAA8BC,SAA9B,EAAyC;AACrC;AACA,QAAI,QAAOF,IAAP,yCAAOA,IAAP,OAAgBP,YAAhB,IAAgC,QAAOQ,IAAP,yCAAOA,I
AAP,OAAgBR,YAApD,EAAkE;AAC9D,eAAO,IAAP;AACH;;AAED,QAAI,QAAOQ,IAAP,yCAAOA,IAAP,OAAgBR,YAAhB,IAAgCQ,SAAS,IAA7C,EAAmD;AAC/C,eAAOD,IAAP;AACH;;AAED,QAAI,QAAOA,IAAP,yCAAOA,IAAP,OAAgBP,YAApB,EAAkC;AAC9BO,eAAOC,gBAAgBt1B,KAAhB,GAAwB,EAAxB,GAA6B,EAApC;AACH;AACD+pB,UAAMsL,IAAN,EAAYC,IAAZ,EAAkBC,SAAlB;AACA,WAAOF,IAAP;AACH;;;;;;;;;;;;;;;;;;;;;;;;;;;AC5ID;;AAEA;;;;;;AAMO,SAASnvB,OAAT,CAAkBmR,GAAlB,EAAuB;AAC1B,WAAOrX,MAAMkG,OAAN,CAAcmR,GAAd,CAAP;AACH;;AAED;;;;;;AAMO,SAASwe,QAAT,CAAmBxe,GAAnB,EAAwB;AAC3B,WAAOA,QAAQ9X,OAAO8X,GAAP,CAAf;AACH;;AAED;;;;;;AAMO,SAASye,QAAT,CAAmBze,GAAnB,EAAwB;AAC3B,WAAO,OAAOA,GAAP,KAAe,QAAtB;AACH;;AAED;;;;;;AAMO,SAAS8U,UAAT,CAAqB9U,GAArB,EAA0B;AAC7B,WAAO,OAAOA,GAAP,KAAe,UAAtB;AACH;;AAED;;;;;;AAMO,SAAS0e,YAAT,CAAuB5wB,IAAvB,EAA6B;AAChC,wCAAW,IAAIkQ,GAAJ,CAAQlQ,IAAR,CAAX;AACH;;AAEM,IAAMuP,cAAc,SAAdA,WAAc;AAAA,mBAAY,IAAIpX,IAAJ,GAAW+a,OAAX,EAAZ,GAAmCY,KAAK+c,KAAL,CAAW/c,KAAKgd,MAAL,KAAgB,KAA3B,CAAnC;AAAA,CAApB;;AAEP;;;;;;;AAOO,SAAS5O,UAAT,CAAoB6O,IAApB,EAA0BC,IAA1B,EAAgC;AACnC,QAAI,CAACjwB,QAAQgwB,IAAR,CAAD,IAAkB,CAAChwB,QAAQiwB,IAAR,CAAvB,EAAsC;AAClC,eAAOD,SAASC,IAAhB;AACH;;AAED,QAAID,KAAKn2B,MAAL,KAAgBo2B,KAAKp2B,MAAzB,EAAiC;AAC7B,eAAO,KAAP;AACH;;AAED,SAAK,IAAIlB,IAAI,CAAb,EAAgBA,IAAIq3B,KAAKn2B,MAAzB,EAAiClB,GAAjC,EAAsC;AAClC,YAAIq3B,KAAKr3B,CAAL,MAAYs3B,KAAKt3B,CAAL,CAAhB,EAAyB;AACrB,mBAAO,KAAP;AACH;AACJ;;AAED,WAAO,IAAP;AACH;;AAED;;;;;;AAMO,SAASuY,YAAT,CAAsBC,GAAtB,EAA2B;AAC9B,WAAOA,GAAP;AACH;;AAED;;;;;;AAMO,IAAMrR,mBAAmB,SAAnBA,gBAAmB,CAACb,IAAD,EAAU;AACtC,QAAI2wB,SAAS3wB,IAAT,CAAJ,EAAoB;AAChB,eAAOF,iDAAUA,CAACO,OAAlB;AACH,KAFD,MAEO,IAAIU,QAAQf,IAAR,KAAiBe,QAAQf,KAAK,CAAL,CAAR,CAArB,EAAuC;AAC1C,eAAOF,iDAAUA,CAACK,OAAlB;AACH,KAFM,MAEA,IAAIY,QAAQf,IAAR,MAAkBA,KAAKpF,MAAL,KAAgB,CAAhB,IAAqB81B,SAAS1wB,KAAK,CAAL,CAAT,CAAvC,CAAJ,EAA+D;AAClE,eAAOF,iDAAUA,CAACS,SAAlB;AACH;AACD,WAAO,IAAP;AACH,CATM,C;;;;;;;;;;;;AC/FP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA
AA;AAAA;AAAA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;ACHA;;AAEA;;;;;;;;;IAQM0wB,Y;AACF,4BAAe;AAAA;;AAAA;;AACX,aAAKpyB,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKD,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6B0kB,oDAA7B;;AAEA3pB,eAAOga,OAAP,CAAesP,gDAAf,EAAuBppB,OAAvB,CAA+B,UAAC1C,GAAD,EAAS;AACpC,kBAAKiH,KAAL,CAAWQ,GAAX,CAAezH,IAAI,CAAJ,CAAf,EAAuBA,IAAI,CAAJ,CAAvB;AACH,SAFD;AAGH;;AAED;;;;;;;;;;;;;yCAS2B;AACvB,gBAAI,CAAC,UAAOgD,MAAZ,EAAoB;AAChB,uBAAO,KAAKiE,KAAL,CAAWc,GAAX,CAAe,YAAf,CAAP;AACH;;AAED,gBAAIuxB,0DAAJ;;AAEA,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,qBAAKryB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6B6xB,OAA7B;AACH,aAFD,MAEO;AACHA,0BAAU9e,OAAO8e,OAAP,CAAV;AACA,oBAAI92B,OAAOmI,IAAP,CAAYmhB,gDAAZ,EAAoBzf,OAApB,CAA4BitB,OAA5B,MAAyC,CAAC,CAA9C,EAAiD;AAC7C,yBAAKryB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6BqkB,gDAAMA,CAACwN,OAAP,CAA7B;AACH,iBAFD,MAEO;AACH,0BAAM,IAAIxwB,KAAJ,cAAqBwwB,OAArB,4BAAN;AACH;AACJ;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iCAgCUz3B,I,EAAMy3B,O,EAAS;AAAA;;AACrB,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,sBAAM,IAAIxwB,KAAJ,CAAU,8BAAV,CAAN;AACH;;AAEDjH,mBAAO2Y,OAAO3Y,IAAP,CAAP;AACA,iBAAKoF,KAAL,CAAWQ,GAAX,CAAe5F,IAAf,EAAqBy3B,OAArB;;AAEA,mBAAO,YAAM;AAAE,uBAAKC,YAAL,CAAkB13B,IAAlB;AAA0B,aAAzC;AACH;;;qCAEaA,I,EAAM;AAChB,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,qBAAKoF,KAAL,CAAWY,MAAX,CAAkBhG,IAAlB;AACH;AACJ;;;gCAEQA,I,EAAM;AACX,gBAAIA,gBAAgBF,QAApB,EAA8B;AAC1B,uBAAOE,IAAP;AACH;AACD,mBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;;;;;;AAGL,IAAMkM,eAAgB,YAAY;AAC9B,QAAI9G,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjB,YAAIhB,UAAU,IAAd,EAAoB;AAChBA,oBAAQ,IAAIoyB,YAAJ,EAAR;AACH;AACD,eAAOpyB,KAAP;AACH;AACD,WAAOgB,UAAP;AACH,CAVqB,EAAtB;;AAYe8F,2EAAf,E;;;;;;;;;;;;;;;;;;ACtHA;;AAEA;;;;;;;IAMMwB,K;;AAEJ;;;;;;AAME,mBAAatP,KAAb,EAAoB0c,QAApB,EAA8B1S,KAA9B,EAAqC;AAAA;;AACjC,YAAMyS,iBAAiBuJ,qEAAqBA,CAAChc,KAAtB,EAA6BhK,KAA7B,CAAvB;;AAEAuC,eAAOg3B,gBAAP,CAAwB,IAAxB,EAA8B;AAC1BjT,oBAAQ;AACJkT,4BAAY,KADR;AAEJC,8BAAc,KAFV;AAGJC,0BAAU,KAHN;AAIJ15B;AAJI,aADkB;AAO1B25B,6BAAiB;AACbH,4BAA
Y,KADC;AAEbC,8BAAc,KAFD;AAGbC,0BAAU,KAHG;AAIb15B,uBAAOyc;AAJM,aAPS;AAa1Bmd,4BAAgB;AACZJ,4BAAY,KADA;AAEZC,8BAAc,KAFF;AAGZC,0BAAU,KAHE;AAIZ15B,uBAAO0c;AAJK;AAbU,SAA9B;;AAqBA,aAAK1S,KAAL,GAAaA,KAAb;AACH;;AAEH;;;;;;;;;;;AAuBA;;;;;;;mCAOc;AACR,mBAAOuQ,OAAO,KAAKva,KAAZ,CAAP;AACH;;AAEH;;;;;;;;;kCAMa;AACP,mBAAO,KAAKA,KAAZ;AACH;;;4BArCY;AACT,mBAAO,KAAKsmB,MAAZ;AACH;;AAED;;;;;;4BAGsB;AAClB,mBAAO,KAAKqT,eAAZ;AACH;;AAED;;;;;;4BAGqB;AACjB,mBAAO,KAAKC,cAAZ;AACH;;;;;;AAwBUtqB,oEAAf,E","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n 
\t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = \"./src/index.js\");\n","export default function autoType(object) {\n for (var 
key in object) {\n var value = object[key].trim(), number;\n if (!value) value = null;\n else if (value === \"true\") value = true;\n else if (value === \"false\") value = false;\n else if (value === \"NaN\") value = NaN;\n else if (!isNaN(number = +value)) value = number;\n else if (/^([-+]\\d{2})?\\d{4}(-\\d{2}(-\\d{2})?)?(T\\d{2}:\\d{2}(:\\d{2}(\\.\\d{3})?)?(Z|[-+]\\d{2}:\\d{2})?)?$/.test(value)) value = new Date(value);\n else continue;\n object[key] = value;\n }\n return object;\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? 
\"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) 
continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? 
\"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","export {default as dsvFormat} from \"./dsv\";\nexport {csvParse, csvParseRows, csvFormat, csvFormatBody, csvFormatRows} from \"./csv\";\nexport {tsvParse, tsvParseRows, tsvFormat, tsvFormatBody, tsvFormatRows} from \"./tsv\";\nexport {default as autoType} from \"./autoType\";\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataConverter from './model/dataConverter';\nimport { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n * Sets the given converters in the store and returns the store\n * @param 
{Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n * Registers a Converter of type DataConverter\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n * Rempves a converter from store\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n 
convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","export { default as DSVStringConverter } from './dsvStringConverter';\nexport { default as JSONConverter } from './jsonConverter';\nexport { default as DSVArrayConverter } from './dsvArrayConverter';\nexport { default as AutoDataConverter } from './autoConverter';\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import converterStore from './dataConverterStore';\nimport DataConverter from './model/dataConverter';\n\nexport { DataConverter, converterStore };\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param 
{Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * 
@param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n 
getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. 
The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) 
{\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the 
box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n 
serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 
'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * FilteringMode determines if resultant DataModel should be created from selection set or rejection set.\n *\n * The following modes are available\n * - `NORMAL`: Only entries from selection set are included in the resulatant DataModel instance\n * - `INVERSE`: Only entries from rejection set are included in the resulatant DataModel instance\n * - ALL: Both the entries from selection and rejection set are returned in two different DataModel instance\n */\n\nexport { default as DataFormat } from './data-format';\nexport { default as DimensionSubtype } from './dimension-subtype';\nexport { default as MeasureSubtype } from './measure-subtype';\nexport { default as FieldType } from './field-type';\nexport { default as FilteringMode } from './filtering-mode';\nexport { default as GROUP_BY_FUNCTIONS } from './group-by-functions';\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport 
pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n 
this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * 
Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new 
ContinuousParser();\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField 
from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} 
Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","export { default as Dimension } from './dimension';\nexport { default as 
Measure } from './measure';\nexport { default as FieldParser } from './parsers/field-parser';\nexport { default as fieldRegistry } from './field-registry';\nexport { columnMajor } from '../utils';\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? 
`${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same 
data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n 
data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects;\n\n const selectorHelperFn = index => selectFn(\n cachedValueObjects[index],\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n 
clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nconst keyFn = (arr, fields, idx, rowId) => {\n const val = fields[arr[idx]].internalValue;\n return arr[idx] === ROW_ID ? rowId : val;\n};\n\nconst boundsChecker = {\n [MeasureSubtype.CONTINUOUS]: (val, domain) => {\n const domainArr = domain[0] instanceof Array ? domain : [domain];\n return domainArr.some(dom => val >= dom[0] && val <= dom[1]);\n }\n};\n\nconst isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain);\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((criteria = {}) => {\n const { identifiers = [[], []], range } = criteria;\n const [fieldNames = [], values = []] = identifiers;\n const dLen = fieldNames.length;\n const valuesMap = {};\n\n if (dLen) {\n for (let i = 1, len = identifiers.length; i < len; i++) {\n const row = identifiers[i];\n const key = row.join();\n valuesMap[key] = 1;\n }\n }\n const rangeKeys = Object.keys(range || {});\n return values.length || rangeKeys.length ? (fields, i) => {\n const present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true;\n\n if (filterByMeasure) {\n return rangeKeys.every((field) => {\n const val = fields[field].internalValue;\n return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype);\n }) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n 
persistDerivations(\n sourceDm,\n clonedDm,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) 
{\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = 
formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = 
selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = 
[{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, 
propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the 
numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? DataModel.default : DataModel;\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n 
currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if 
(commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === 
JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user 
can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // 
For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from 
'../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n 
[LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = 
defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // 
reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","export { createBinnedFieldData } from './bucket-creator';\nexport { compose, bin, select, project, groupBy as groupby } from './compose';\nexport { calculateVariable, sort } from './pure-operators';\nexport { crossProduct } from './cross-product';\nexport { dataBuilder } from './data-builder';\nexport { difference } from './difference';\nexport { getCommonSchema } from './get-common-schema';\nexport { defReducer, fnList } from './group-by-function';\nexport { groupBy, getFieldArr, getReducerObj } from './group-by';\nexport { mergeSort } from './merge-sort';\nexport { naturalJoinFilter } from './natural-join-filter-function';\nexport { naturalJoin } from './natural-join';\nexport { leftOuterJoin, rightOuterJoin, fullOuterJoin } from './outer-join';\nexport { rowDiffsetIterator } from './row-diffset-iterator';\nexport { union } from './union';\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which 
needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return 
crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 
'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? -1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 
'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = 
fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, 
schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = 
'';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n 
this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. 
This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is 
filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. 
This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * Generates domain for measure field.\n *\n * @param {Array} data - The array of data.\n * @return {Array} Returns the measure domain.\n */\nexport default (data) => {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n data.forEach((d) => {\n if (d < min) {\n min = d;\n 
}\n if (d > max) {\n max = d;\n }\n });\n\n return [min, max];\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","export { default as DateTimeFormatter } from './date-time-formatter';\nexport { default as columnMajor } from './column-major';\nexport { default as generateMeasureDomain } from './domain-generator';\nexport { default as extend2 } from './extend2';\nexport * from './helper';\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. 
These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/helper.js b/src/helper.js index af4b889..1eb10b0 100644 --- a/src/helper.js +++ b/src/helper.js @@ -4,7 +4,7 @@ import Value from './value'; import { rowDiffsetIterator } from './operator'; -import { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants'; +import { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import defaultConfig from './default-config'; import { 
converterStore } from './converter'; @@ -145,12 +145,10 @@ export const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => let cloneProvider = () => sourceDm.detachedRoot(); const { mode } = config; const rowDiffset = clonedDm._rowDiffset; - const fields = clonedDm.getPartialFieldspace().fields; - const formattedFieldsData = fields.map(field => field.formattedData()); - const rawFieldsData = fields.map(field => field.data()); + const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects; const selectorHelperFn = index => selectFn( - prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index), + cachedValueObjects[index], index, cloneProvider, cachedStore @@ -182,6 +180,20 @@ const getKey = (arr, data, fn) => { return key; }; +const keyFn = (arr, fields, idx, rowId) => { + const val = fields[arr[idx]].internalValue; + return arr[idx] === ROW_ID ? rowId : val; +}; + +const boundsChecker = { + [MeasureSubtype.CONTINUOUS]: (val, domain) => { + const domainArr = domain[0] instanceof Array ? 
domain : [domain]; + return domainArr.some(dom => val >= dom[0] && val <= dom[1]); + } +}; + +const isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain); + export const filterPropagationModel = (model, propModels, config = {}) => { let fns = []; const operation = config.operation || LOGICAL_OPERATORS.AND; @@ -192,40 +204,28 @@ export const filterPropagationModel = (model, propModels, config = {}) => { if (!propModels.length) { fns = [() => false]; } else { - fns = propModels.map(propModel => ((dataModel) => { - let keyFn; - const dataObj = dataModel.getData(); - const fieldsConfig = dataModel.getFieldsConfig(); - const dimensions = Object.keys(dataModel.getFieldspace().getDimension()) - .filter(d => d in modelFieldsConfig); - const dLen = dimensions.length; - const indices = dimensions.map(d => - fieldsConfig[d].index); - const measures = Object.keys(dataModel.getFieldspace().getMeasure()) - .filter(d => d in modelFieldsConfig); - const fieldsSpace = dataModel.getFieldspace().fieldsObj(); - const data = dataObj.data; - const domain = measures.reduce((acc, v) => { - acc[v] = fieldsSpace[v].domain(); - return acc; - }, {}); + fns = propModels.map(propModel => ((criteria = {}) => { + const { identifiers = [[], []], range } = criteria; + const [fieldNames = [], values = []] = identifiers; + const dLen = fieldNames.length; const valuesMap = {}; - keyFn = (arr, row, idx) => row[arr[idx]]; if (dLen) { - data.forEach((row) => { - const key = getKey(indices, row, keyFn); + for (let i = 1, len = identifiers.length; i < len; i++) { + const row = identifiers[i]; + const key = row.join(); valuesMap[key] = 1; - }); + } } - - keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue; - return data.length ? (fields) => { - const present = dLen ? valuesMap[getKey(dimensions, fields, keyFn)] : true; + const rangeKeys = Object.keys(range || {}); + return values.length || rangeKeys.length ? (fields, i) => { + const present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; if (filterByMeasure) { - return measures.every(field => fields[field].internalValue >= domain[field][0] && - fields[field].internalValue <= domain[field][1]) && present; + return rangeKeys.every((field) => { + const val = fields[field].internalValue; + return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); + }) && present; } return present; } : () => false; diff --git a/src/index.spec.js b/src/index.spec.js index 127ab33..aacc022 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -1903,30 +1903,30 @@ describe('DataModel', () => { { name: 'first', type: 'dimension' }, { name: 'second', type: 'dimension' }, ]; - const propModel = new DataModel([{ - first: 'Hey', - second: 'Jude' - }], [{ - name: 'first', - type: 'dimension' - }, { - name: 'second', - type: 'dimension' - }]); - const propModel1 = new DataModel([{ - first: 'Hey', - second: 'Jude', - count: 100 - }], [{ - name: 'first', - type: 'dimension' - }, { - name: 'second', - type: 'dimension' - }, { - name: 'count', - type: 'measure' - }]); + + const propModel = { + fields: [{ + name: 'first', + type: 'dimension' + }, { + name: 'second', + type: 'dimension' + }], + data: [ + ['first', 'second'], + ['Hey', 'Jude'] + ] + }; + + const propModel1 = { + fields: [{ + name: 'sales', + type: 'measure' + }], + range: { + sales: [20, 25] + } + }; let dataModel; let projectionFlag = false; From 53267c46f8e01acd39b08bbc05eac9fcc1f60f9e Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Tue, 22 Oct 2019 13:12:24 +0530 Subject: [PATCH 17/20] feature/MZ-25: Fix test case --- src/datamodel.js | 2 +- src/helper.js | 58 +++++++++++++++++++++++++++++++----------------- 2 files changed, 39 insertions(+), 21 deletions(-) diff --git a/src/datamodel.js b/src/datamodel.js index b7e3652..47046cb 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -580,7 +580,7 @@ class DataModel extends Relation { }, config)); if (isMutableAction) { - 
propagateImmutableActions(propagationNameSpace, rootModels, { + propagateImmutableActions(propagationNameSpace, rootModel, { config, propConfig }, this); diff --git a/src/helper.js b/src/helper.js index 1eb10b0..e5ceeaf 100644 --- a/src/helper.js +++ b/src/helper.js @@ -185,11 +185,14 @@ const keyFn = (arr, fields, idx, rowId) => { return arr[idx] === ROW_ID ? rowId : val; }; +const domainChecker = (val, domain) => { + const domainArr = domain[0] instanceof Array ? domain : [domain]; + return domainArr.some(dom => val >= dom[0] && val <= dom[1]); +}; + const boundsChecker = { - [MeasureSubtype.CONTINUOUS]: (val, domain) => { - const domainArr = domain[0] instanceof Array ? domain : [domain]; - return domainArr.some(dom => val >= dom[0] && val <= dom[1]); - } + [MeasureSubtype.CONTINUOUS]: domainChecker, + [DimensionSubtype.TEMPORAL]: domainChecker }; const isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain); @@ -197,7 +200,7 @@ const isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](va export const filterPropagationModel = (model, propModels, config = {}) => { let fns = []; const operation = config.operation || LOGICAL_OPERATORS.AND; - const filterByMeasure = config.filterByMeasure || false; + const filterByMeasure = config.filterByMeasure; const clonedModel = cloneWithAllFields(model); const modelFieldsConfig = clonedModel.getFieldsConfig(); @@ -206,28 +209,39 @@ export const filterPropagationModel = (model, propModels, config = {}) => { } else { fns = propModels.map(propModel => ((criteria = {}) => { const { identifiers = [[], []], range } = criteria; - const [fieldNames = [], values = []] = identifiers; + let [fieldNames = [], values = []] = identifiers; + const indices = fieldNames.reduce((map, name, i) => { + map[name] = i; + return map; + }, {}); + fieldNames = fieldNames.filter(field => field in modelFieldsConfig); const dLen = fieldNames.length; const valuesMap = {}; if (dLen) { for (let i = 1, len = 
identifiers.length; i < len; i++) { const row = identifiers[i]; - const key = row.join(); + const key = `${fieldNames.map((field) => { + const idx = indices[field]; + return row[idx]; + })}`; valuesMap[key] = 1; } } - const rangeKeys = Object.keys(range || {}); - return values.length || rangeKeys.length ? (fields, i) => { + let rangeKeys = Object.keys(range || {}).filter(field => field in modelFieldsConfig); + const hasData = values.length || rangeKeys.length; + + if (!filterByMeasure) { + rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.MEASURE); + } + + return hasData ? (fields, i) => { const present = dLen ? valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; - if (filterByMeasure) { - return rangeKeys.every((field) => { - const val = fields[field].internalValue; - return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); - }) && present; - } - return present; + return rangeKeys.every((field) => { + const val = fields[field].internalValue; + return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); + }) && present; } : () => false; })(propModel)); } @@ -564,7 +578,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf let criterias = []; - if (identifiers === null && config.persistent !== true) { + if (identifiers === null) { criterias = [{ criteria: [] }]; @@ -613,9 +627,10 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf }, config); const rootGroupByModel = rootModels.groupByModel; + if (propagateInterpolatedValues && rootGroupByModel) { propModel = filterPropagationModel(rootGroupByModel, criteria, { - filterByMeasure: propagateInterpolatedValues + filterByMeasure: true }); propagateIdentifiers(rootGroupByModel, propModel, propConfig); } @@ -636,7 +651,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf }); }; -export const propagateImmutableActions = (propagationNameSpace, 
rootModels, propagationInf) => { +export const propagateImmutableActions = (propagationNameSpace, rootModel, propagationInf) => { const immutableActions = propagationNameSpace.immutableActions; for (const action in immutableActions) { @@ -647,7 +662,10 @@ export const propagateImmutableActions = (propagationNameSpace, rootModels, prop propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true; if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) { const criteriaModel = actionConf.criteria; - propagateToAllDataModels(criteriaModel, rootModels, { + propagateToAllDataModels(criteriaModel, { + model: rootModel, + groupByModel: getRootGroupByModel(actionInf.model) + }, { propagationNameSpace, propagateToSource: false, sourceId: propagationSourceId From e2edbbd30a856d5f555f560a3cec39bbc1d32d47 Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Wed, 23 Oct 2019 15:52:16 +0530 Subject: [PATCH 18/20] feature/MZ-13: Fix row id propagation - Remove propagate interpolated values --- src/datamodel.js | 4 ++- src/helper.js | 85 +++++++++++++++++++++++++++++++----------------- 2 files changed, 58 insertions(+), 31 deletions(-) diff --git a/src/datamodel.js b/src/datamodel.js index 47046cb..e362e0b 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -574,7 +574,9 @@ class DataModel extends Relation { }; addToNameSpace && addToPropNamespace(propagationNameSpace, config, this); - propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId }, + propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, + sourceId: propagationSourceId, + propagationSource: this }, Object.assign({ payload }, config)); diff --git a/src/helper.js b/src/helper.js index e5ceeaf..f73a3d4 100644 --- a/src/helper.js +++ b/src/helper.js @@ -171,18 +171,19 @@ export const cloneWithAllFields = (model) => { return clonedDm; }; -const getKey = (arr, data, fn) => { - let key = fn(arr, data, 0); 
+const getKey = (arr, data, fn, rowId) => { + let key = fn(arr, data, 0, rowId); for (let i = 1, len = arr.length; i < len; i++) { - key = `${key},${fn(arr, data, i)}`; + key = `${key},${fn(arr, data, i, rowId)}`; } return key; }; const keyFn = (arr, fields, idx, rowId) => { - const val = fields[arr[idx]].internalValue; - return arr[idx] === ROW_ID ? rowId : val; + const field = arr[idx]; + const val = field === ROW_ID ? rowId : fields[field].internalValue; + return val; }; const domainChecker = (val, domain) => { @@ -200,21 +201,22 @@ const isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](va export const filterPropagationModel = (model, propModels, config = {}) => { let fns = []; const operation = config.operation || LOGICAL_OPERATORS.AND; - const filterByMeasure = config.filterByMeasure; - const clonedModel = cloneWithAllFields(model); + const { filterByDim = true, filterByMeasure = false, clone = true } = config; + const clonedModel = clone ? cloneWithAllFields(model) : model; const modelFieldsConfig = clonedModel.getFieldsConfig(); if (!propModels.length) { fns = [() => false]; } else { - fns = propModels.map(propModel => ((criteria = {}) => { + fns = propModels.map(propModel => (({ criteria = {} }) => { const { identifiers = [[], []], range } = criteria; let [fieldNames = [], values = []] = identifiers; const indices = fieldNames.reduce((map, name, i) => { map[name] = i; return map; }, {}); - fieldNames = fieldNames.filter(field => field in modelFieldsConfig); + fieldNames = fieldNames.filter(field => (field in modelFieldsConfig && + modelFieldsConfig[field].def.type === FieldType.DIMENSION) || field === ROW_ID); const dLen = fieldNames.length; const valuesMap = {}; @@ -235,8 +237,15 @@ export const filterPropagationModel = (model, propModels, config = {}) => { rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.MEASURE); } + if (!filterByDim) { + rangeKeys = rangeKeys.filter(field => 
modelFieldsConfig[field].def.type !== FieldType.DIMENSION); + } + return hasData ? (fields, i) => { - const present = dLen ? valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; + let present = true; + if (filterByDim) { + present = dLen ? valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; + } return rangeKeys.every((field) => { const val = fields[field].internalValue; @@ -248,11 +257,11 @@ export const filterPropagationModel = (model, propModels, config = {}) => { let filteredModel; if (operation === LOGICAL_OPERATORS.AND) { - filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), { + filteredModel = clonedModel.select((fields, i) => fns.every(fn => fn(fields, i)), { saveChild: false }); } else { - filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), { + filteredModel = clonedModel.select((fields, i) => fns.some(fn => fn(fields, i)), { saveChild: false }); } @@ -527,6 +536,7 @@ const getFilteredModel = (propModel, path) => { const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => { const nonTraversingModel = propModelInf.nonTraversingModel; const excludeModels = propModelInf.excludeModels || []; + const criterias = propModelInf.criteria; if (dataModel === nonTraversingModel) { return; @@ -538,7 +548,16 @@ const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = const children = dataModel._children; children.forEach((child) => { - const selectionModel = applyExistingOperationOnModel(propModel, child); + const matchingCriteria = criterias.filter(val => val.groupedModel === child); + let selectionModel = applyExistingOperationOnModel(propModel, child); + + if (matchingCriteria.length) { + selectionModel = filterPropagationModel(selectionModel, matchingCriteria, { + filterByDim: false, + filterByMeasure: true, + clone: false + }); + } propagateIdentifiers(child, selectionModel, config, propModelInf); }); }; @@ -567,14 +586,23 @@ export const getPathToRootModel = 
(model, path = []) => { export const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => { let criteria; - let propModel; const { propagationNameSpace, propagateToSource } = propagationInf; const propagationSourceId = propagationInf.sourceId; - const propagateInterpolatedValues = config.propagateInterpolatedValues; const filterFn = (entry) => { const filter = config.filterFn || (() => true); return filter(entry, config); }; + const addGroupedModel = ({ config: conf, model }) => { + const { criteria: crit } = conf; + let groupedModel; + + if (crit !== null && crit.fields.some(d => d.type === FieldType.MEASURE)) { + groupedModel = getRootGroupByModel(model); + } + return Object.assign({}, conf, { + groupedModel + }); + }; let criterias = []; @@ -589,7 +617,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId); } - const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria); + const filteredCriteria = actionCriterias.filter(filterFn); const excludeModels = []; @@ -601,7 +629,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf if (actionConf.applyOnSource === false && actionConf.action === config.action && actionConf.sourceId !== propagationSourceId) { excludeModels.push(actionInf.model); - criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria); + criteria = sourceActionCriterias.filter(d => d !== actionInf).map(addGroupedModel); criteria.length && criterias.push({ criteria, models: actionInf.model, @@ -612,7 +640,11 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf } - criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null); + criteria = [].concat(...[...filteredCriteria.map(addGroupedModel), { + criteria: identifiers, + groupedModel: identifiers !== null && 
identifiers.fields.some(d => d.type === FieldType.MEASURE) ? + getRootGroupByModel(propagationInf.propagationSource) : null + }]).filter(d => d !== null); criterias.push({ criteria, excludeModels: [...excludeModels, ...config.excludeModels || []] @@ -626,17 +658,9 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf propagationSourceId }, config); - const rootGroupByModel = rootModels.groupByModel; - - if (propagateInterpolatedValues && rootGroupByModel) { - propModel = filterPropagationModel(rootGroupByModel, criteria, { - filterByMeasure: true - }); - propagateIdentifiers(rootGroupByModel, propModel, propConfig); - } - criterias.forEach((inf) => { - const propagationModel = filterPropagationModel(rootModel, inf.criteria); + const { criteria: crit } = inf; + const propagationModel = filterPropagationModel(rootModel, crit); const path = inf.path; if (path) { @@ -645,7 +669,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf } else { propagateIdentifiers(rootModel, propagationModel, propConfig, { excludeModels: inf.excludeModels, - nonTraversingModel: propagateInterpolatedValues && rootGroupByModel + criteria: crit }); } }); @@ -668,7 +692,8 @@ export const propagateImmutableActions = (propagationNameSpace, rootModel, propa }, { propagationNameSpace, propagateToSource: false, - sourceId: propagationSourceId + sourceId: propagationSourceId, + propagationSource: actionInf.model }, actionConf); } } From 8b9c464c65f2f3a6a766cefa23e3307b92d7db26 Mon Sep 17 00:00:00 2001 From: Ranajit Banerjee Date: Wed, 20 Nov 2019 16:43:36 +0530 Subject: [PATCH 19/20] feature/MZ-104: Fix temporal domain - Sets row id field type as dimension --- dist/datamodel.js | 292 +++++++++++++++++++----------- dist/datamodel.js.map | 2 +- src/fields/continuous/index.js | 23 +-- src/fields/helper.js | 24 +++ src/fields/temporal/index.js | 16 +- src/fields/temporal/index.spec.js | 5 +- src/helper.js | 10 +- src/index.spec.js | 27 ++- 
src/operator/data-builder.js | 6 +- 9 files changed, 247 insertions(+), 158 deletions(-) create mode 100644 src/fields/helper.js diff --git a/dist/datamodel.js b/dist/datamodel.js index e80d5f7..8e0ba5b 100644 --- a/dist/datamodel.js +++ b/dist/datamodel.js @@ -1688,12 +1688,14 @@ var DataModel = function (_Relation) { }; addToNameSpace && Object(_helper__WEBPACK_IMPORTED_MODULE_1__["addToPropNamespace"])(propagationNameSpace, config, this); - Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateToAllDataModels"])(identifiers, rootModels, { propagationNameSpace: propagationNameSpace, sourceId: propagationSourceId }, Object.assign({ + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateToAllDataModels"])(identifiers, rootModels, { propagationNameSpace: propagationNameSpace, + sourceId: propagationSourceId, + propagationSource: this }, Object.assign({ payload: payload }, config)); if (isMutableAction) { - Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateImmutableActions"])(propagationNameSpace, rootModels, { + Object(_helper__WEBPACK_IMPORTED_MODULE_1__["propagateImmutableActions"])(propagationNameSpace, rootModel, { config: config, propConfig: propConfig }, this); @@ -2666,11 +2668,10 @@ var Categorical = function (_Dimension) { "use strict"; __webpack_require__.r(__webpack_exports__); -/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); -/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../enums */ "./src/enums/index.js"); -/* harmony import */ var _measure__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../measure */ "./src/fields/measure/index.js"); -/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! 
../../invalid-aware-types */ "./src/invalid-aware-types.js"); -/* harmony import */ var _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ../parsers/continuous-parser */ "./src/fields/parsers/continuous-parser/index.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../enums */ "./src/enums/index.js"); +/* harmony import */ var _measure__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../measure */ "./src/fields/measure/index.js"); +/* harmony import */ var _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../parsers/continuous-parser */ "./src/fields/parsers/continuous-parser/index.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../helper */ "./src/fields/helper.js"); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } @@ -2684,7 +2685,6 @@ function _inherits(subClass, superClass) { if (typeof superClass !== "function" - /** * Represents continuous field subtype. * @@ -2713,7 +2713,7 @@ var Continuous = function (_Measure) { * @return {string} Returns the subtype of the field. 
*/ value: function subtype() { - return _enums__WEBPACK_IMPORTED_MODULE_1__["MeasureSubtype"].CONTINUOUS; + return _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS; } /** @@ -2727,37 +2727,17 @@ var Continuous = function (_Measure) { }, { key: 'calculateDataDomain', value: function calculateDataDomain() { - var _this2 = this; - - var min = Number.POSITIVE_INFINITY; - var max = Number.NEGATIVE_INFINITY; - - // here don't use this.data() as the iteration will be occurred two times on same data. - Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { - var datum = _this2.partialField.data[i]; - if (datum instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"]) { - return; - } - - if (datum < min) { - min = datum; - } - if (datum > max) { - max = datum; - } - }); - - return [min, max]; + return Object(_helper__WEBPACK_IMPORTED_MODULE_3__["calculateContinuousDomain"])(this.partialField.data, this.rowDiffset); } }], [{ key: 'parser', value: function parser() { - return new _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_4__["default"](); + return new _parsers_continuous_parser__WEBPACK_IMPORTED_MODULE_2__["default"](); } }]); return Continuous; -}(_measure__WEBPACK_IMPORTED_MODULE_2__["default"]); +}(_measure__WEBPACK_IMPORTED_MODULE_1__["default"]); /* harmony default export */ __webpack_exports__["default"] = (Continuous); @@ -3152,6 +3132,45 @@ var Field = function () { /***/ }), +/***/ "./src/fields/helper.js": +/*!******************************!*\ + !*** ./src/fields/helper.js ***! + \******************************/ +/*! 
exports provided: calculateContinuousDomain */ +/***/ (function(module, __webpack_exports__, __webpack_require__) { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "calculateContinuousDomain", function() { return calculateContinuousDomain; }); +/* harmony import */ var _operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../operator/row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); +/* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../invalid-aware-types */ "./src/invalid-aware-types.js"); + + + +var calculateContinuousDomain = function calculateContinuousDomain(data, rowDiffset) { + var min = Number.POSITIVE_INFINITY; + var max = Number.NEGATIVE_INFINITY; + + // here don't use this.data() as the iteration will be occurred two times on same data. + Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(rowDiffset, function (i) { + var datum = data[i]; + if (datum instanceof _invalid_aware_types__WEBPACK_IMPORTED_MODULE_1__["default"]) { + return; + } + + if (datum < min) { + min = datum; + } + if (datum > max) { + max = datum; + } + }); + + return [min, max]; +}; + +/***/ }), + /***/ "./src/fields/index.js": /*!*****************************!*\ !*** ./src/fields/index.js ***! @@ -3729,6 +3748,7 @@ __webpack_require__.r(__webpack_exports__); /* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../utils */ "./src/utils/index.js"); /* harmony import */ var _invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../../invalid-aware-types */ "./src/invalid-aware-types.js"); /* harmony import */ var _parsers_temporal_parser__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! 
../parsers/temporal-parser */ "./src/fields/parsers/temporal-parser/index.js"); +/* harmony import */ var _helper__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ../helper */ "./src/fields/helper.js"); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } @@ -3743,6 +3763,7 @@ function _inherits(subClass, superClass) { if (typeof superClass !== "function" + /** * Represents temporal field subtype. * @@ -3782,22 +3803,7 @@ var Temporal = function (_Dimension) { _createClass(Temporal, [{ key: 'calculateDataDomain', value: function calculateDataDomain() { - var _this2 = this; - - var hash = new Set(); - var domain = []; - - // here don't use this.data() as the iteration will be - // occurred two times on same data. 
- Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { - var datum = _this2.partialField.data[i]; - if (!hash.has(datum)) { - hash.add(datum); - domain.push(datum); - } - }); - - return domain; + return Object(_helper__WEBPACK_IMPORTED_MODULE_5__["calculateContinuousDomain"])(this.partialField.data, this.rowDiffset); } /** @@ -3869,13 +3875,13 @@ var Temporal = function (_Dimension) { }, { key: 'formattedData', value: function formattedData() { - var _this3 = this; + var _this2 = this; var data = []; var dataFormat = this.format(); Object(_operator_row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__["rowDiffsetIterator"])(this.rowDiffset, function (i) { - var datum = _this3.partialField.data[i]; + var datum = _this2.partialField.data[i]; // If value is of invalid type or format is missing if (_invalid_aware_types__WEBPACK_IMPORTED_MODULE_3__["default"].isInvalid(datum) || !dataFormat && Number.isFinite(datum)) { // Use the invalid map value or the raw value @@ -3951,6 +3957,7 @@ __webpack_require__.r(__webpack_exports__); /* harmony import */ var _fields__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(/*! ./fields */ "./src/fields/index.js"); /* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(/*! 
./utils */ "./src/utils/index.js"); var _selectModeMap, + _boundsChecker, _this = undefined; var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); @@ -4171,26 +4178,29 @@ var cloneWithAllFields = function cloneWithAllFields(model) { return clonedDm; }; -var getKey = function getKey(arr, data, fn) { - var key = fn(arr, data, 0); +var getKey = function getKey(arr, data, fn, rowId) { + var key = fn(arr, data, 0, rowId); for (var i = 1, len = arr.length; i < len; i++) { - key = key + ',' + fn(arr, data, i); + key = key + ',' + fn(arr, data, i, rowId); } return key; }; var keyFn = function keyFn(arr, fields, idx, rowId) { - var val = fields[arr[idx]].internalValue; - return arr[idx] === _constants__WEBPACK_IMPORTED_MODULE_4__["ROW_ID"] ? rowId : val; + var field = arr[idx]; + var val = field === _constants__WEBPACK_IMPORTED_MODULE_4__["ROW_ID"] ? rowId : fields[field].internalValue; + return val; }; -var boundsChecker = _defineProperty({}, _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS, function (val, domain) { +var domainChecker = function domainChecker(val, domain) { var domainArr = domain[0] instanceof Array ? 
domain : [domain]; return domainArr.some(function (dom) { return val >= dom[0] && val <= dom[1]; }); -}); +}; + +var boundsChecker = (_boundsChecker = {}, _defineProperty(_boundsChecker, _enums__WEBPACK_IMPORTED_MODULE_0__["MeasureSubtype"].CONTINUOUS, domainChecker), _defineProperty(_boundsChecker, _enums__WEBPACK_IMPORTED_MODULE_0__["DimensionSubtype"].TEMPORAL, domainChecker), _boundsChecker); var isWithinDomain = function isWithinDomain(value, domain, fieldType) { return boundsChecker[fieldType](value, domain); @@ -4201,8 +4211,14 @@ var filterPropagationModel = function filterPropagationModel(model, propModels) var fns = []; var operation = config.operation || _constants__WEBPACK_IMPORTED_MODULE_4__["LOGICAL_OPERATORS"].AND; - var filterByMeasure = config.filterByMeasure || false; - var clonedModel = cloneWithAllFields(model); + var _config$filterByDim = config.filterByDim, + filterByDim = _config$filterByDim === undefined ? true : _config$filterByDim, + _config$filterByMeasu = config.filterByMeasure, + filterByMeasure = _config$filterByMeasu === undefined ? false : _config$filterByMeasu, + _config$clone = config.clone, + clone = _config$clone === undefined ? true : _config$clone; + + var clonedModel = clone ? cloneWithAllFields(model) : model; var modelFieldsConfig = clonedModel.getFieldsConfig(); if (!propModels.length) { @@ -4211,8 +4227,9 @@ var filterPropagationModel = function filterPropagationModel(model, propModels) }]; } else { fns = propModels.map(function (propModel) { - return function () { - var criteria = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + return function (_ref5) { + var _ref5$criteria = _ref5.criteria, + criteria = _ref5$criteria === undefined ? {} : _ref5$criteria; var _criteria$identifiers = criteria.identifiers, identifiers = _criteria$identifiers === undefined ? 
[[], []] : _criteria$identifiers, range = criteria.range; @@ -4223,27 +4240,57 @@ var filterPropagationModel = function filterPropagationModel(model, propModels) _identifiers$2 = _identifiers[1], values = _identifiers$2 === undefined ? [] : _identifiers$2; + var indices = fieldNames.reduce(function (map, name, i) { + map[name] = i; + return map; + }, {}); + fieldNames = fieldNames.filter(function (field) { + return field in modelFieldsConfig && modelFieldsConfig[field].def.type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION || field === _constants__WEBPACK_IMPORTED_MODULE_4__["ROW_ID"]; + }); var dLen = fieldNames.length; var valuesMap = {}; if (dLen) { - for (var i = 1, len = identifiers.length; i < len; i++) { + var _loop = function _loop(i, len) { var row = identifiers[i]; - var key = row.join(); + var key = '' + fieldNames.map(function (field) { + var idx = indices[field]; + return row[idx]; + }); valuesMap[key] = 1; + }; + + for (var i = 1, len = identifiers.length; i < len; i++) { + _loop(i, len); } } - var rangeKeys = Object.keys(range || {}); - return values.length || rangeKeys.length ? function (fields, i) { - var present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; - - if (filterByMeasure) { - return rangeKeys.every(function (field) { - var val = fields[field].internalValue; - return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); - }) && present; + var rangeKeys = Object.keys(range || {}).filter(function (field) { + return field in modelFieldsConfig; + }); + var hasData = values.length || rangeKeys.length; + + if (!filterByMeasure) { + rangeKeys = rangeKeys.filter(function (field) { + return modelFieldsConfig[field].def.type !== _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE; + }); + } + + if (!filterByDim) { + rangeKeys = rangeKeys.filter(function (field) { + return modelFieldsConfig[field].def.type !== _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].DIMENSION; + }); + } + + return hasData ? function (fields, i) { + var present = true; + if (filterByDim) { + present = dLen ? valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; } - return present; + + return rangeKeys.every(function (field) { + var val = fields[field].internalValue; + return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); + }) && present; } : function () { return false; }; @@ -4253,17 +4300,17 @@ var filterPropagationModel = function filterPropagationModel(model, propModels) var filteredModel = void 0; if (operation === _constants__WEBPACK_IMPORTED_MODULE_4__["LOGICAL_OPERATORS"].AND) { - filteredModel = clonedModel.select(function (fields) { + filteredModel = clonedModel.select(function (fields, i) { return fns.every(function (fn) { - return fn(fields); + return fn(fields, i); }); }, { saveChild: false }); } else { - filteredModel = clonedModel.select(function (fields) { + filteredModel = clonedModel.select(function (fields, i) { return fns.some(function (fn) { - return fn(fields); + return fn(fields, i); }); }, { saveChild: false @@ -4552,12 +4599,8 @@ var propagateIdentifiers = function propagateIdentifiers(dataModel, propModel) { var 
config = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; var propModelInf = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}; - var nonTraversingModel = propModelInf.nonTraversingModel; var excludeModels = propModelInf.excludeModels || []; - - if (dataModel === nonTraversingModel) { - return; - } + var criterias = propModelInf.criteria; var propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true; @@ -4565,7 +4608,18 @@ var propagateIdentifiers = function propagateIdentifiers(dataModel, propModel) { var children = dataModel._children; children.forEach(function (child) { + var matchingCriteria = criterias.filter(function (val) { + return val.groupedModel === child; + }); var selectionModel = applyExistingOperationOnModel(propModel, child); + + if (matchingCriteria.length) { + selectionModel = filterPropagationModel(selectionModel, matchingCriteria, { + filterByDim: false, + filterByMeasure: true, + clone: false + }); + } propagateIdentifiers(child, selectionModel, config, propModelInf); }); }; @@ -4598,12 +4652,10 @@ var getPathToRootModel = function getPathToRootModel(model) { var propagateToAllDataModels = function propagateToAllDataModels(identifiers, rootModels, propagationInf, config) { var criteria = void 0; - var propModel = void 0; var propagationNameSpace = propagationInf.propagationNameSpace, propagateToSource = propagationInf.propagateToSource; var propagationSourceId = propagationInf.sourceId; - var propagateInterpolatedValues = config.propagateInterpolatedValues; var filterFn = function filterFn(entry) { var filter = config.filterFn || function () { return true; @@ -4611,15 +4663,32 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro return filter(entry, config); }; + var addGroupedModel = function addGroupedModel(_ref6) { + var conf = _ref6.config, + model = _ref6.model; + var crit = conf.criteria; + + var groupedModel = void 0; + + if (crit !== null 
&& crit.fields.some(function (d) { + return d.type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE; + })) { + groupedModel = getRootGroupByModel(model); + } + return Object.assign({}, conf, { + groupedModel: groupedModel + }); + }; + var criterias = []; - if (identifiers === null && config.persistent !== true) { + if (identifiers === null) { criterias = [{ criteria: [] }]; criteria = []; } else { - var _ref5; + var _ref7; var actionCriterias = Object.values(propagationNameSpace.mutableActions); if (propagateToSource !== false) { @@ -4628,9 +4697,7 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro }); } - var filteredCriteria = actionCriterias.filter(filterFn).map(function (action) { - return action.config.criteria; - }); + var filteredCriteria = actionCriterias.filter(filterFn); var excludeModels = []; @@ -4643,9 +4710,7 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro excludeModels.push(actionInf.model); criteria = sourceActionCriterias.filter(function (d) { return d !== actionInf; - }).map(function (d) { - return d.config.criteria; - }); + }).map(addGroupedModel); criteria.length && criterias.push({ criteria: criteria, models: actionInf.model, @@ -4655,7 +4720,12 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro }); } - criteria = (_ref5 = []).concat.apply(_ref5, [].concat(_toConsumableArray(filteredCriteria), [identifiers])).filter(function (d) { + criteria = (_ref7 = []).concat.apply(_ref7, [].concat(_toConsumableArray(filteredCriteria.map(addGroupedModel)), [{ + criteria: identifiers, + groupedModel: identifiers !== null && identifiers.fields.some(function (d) { + return d.type === _enums__WEBPACK_IMPORTED_MODULE_0__["FieldType"].MEASURE; + }) ? 
getRootGroupByModel(propagationInf.propagationSource) : null + }])).filter(function (d) { return d !== null; }); criterias.push({ @@ -4671,16 +4741,14 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro propagationSourceId: propagationSourceId }, config); - var rootGroupByModel = rootModels.groupByModel; - if (propagateInterpolatedValues && rootGroupByModel) { - propModel = filterPropagationModel(rootGroupByModel, criteria, { - filterByMeasure: propagateInterpolatedValues - }); - propagateIdentifiers(rootGroupByModel, propModel, propConfig); - } - criterias.forEach(function (inf) { - var propagationModel = filterPropagationModel(rootModel, inf.criteria); + var crit = inf.criteria; + + var propagationModel = filterPropagationModel(rootModel, crit, { + filterByMeasure: !!crit.find(function (d) { + return d.groupedModel === rootModel; + }) + }); var path = inf.path; if (path) { @@ -4689,13 +4757,13 @@ var propagateToAllDataModels = function propagateToAllDataModels(identifiers, ro } else { propagateIdentifiers(rootModel, propagationModel, propConfig, { excludeModels: inf.excludeModels, - nonTraversingModel: propagateInterpolatedValues && rootGroupByModel + criteria: crit }); } }); }; -var propagateImmutableActions = function propagateImmutableActions(propagationNameSpace, rootModels, propagationInf) { +var propagateImmutableActions = function propagateImmutableActions(propagationNameSpace, rootModel, propagationInf) { var immutableActions = propagationNameSpace.immutableActions; for (var action in immutableActions) { @@ -4705,10 +4773,14 @@ var propagateImmutableActions = function propagateImmutableActions(propagationNa var filterImmutableAction = propagationInf.propConfig.filterImmutableAction ? 
propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true; if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) { var criteriaModel = actionConf.criteria; - propagateToAllDataModels(criteriaModel, rootModels, { + propagateToAllDataModels(criteriaModel, { + model: rootModel, + groupByModel: getRootGroupByModel(actionInf.model) + }, { propagationNameSpace: propagationNameSpace, propagateToSource: false, - sourceId: propagationSourceId + sourceId: propagationSourceId, + propagationSource: actionInf.model }, actionConf); } } @@ -5485,11 +5557,15 @@ __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "dataBuilder", function() { return dataBuilder; }); /* harmony import */ var _row_diffset_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./row-diffset-iterator */ "./src/operator/row-diffset-iterator.js"); /* harmony import */ var _sort__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./sort */ "./src/operator/sort.js"); +/* harmony import */ var _enums__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../enums */ "./src/enums/index.js"); +/* harmony import */ var _constants__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ../constants */ "./src/constants/index.js"); function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } } + + /** * Builds the actual data array. 
* @@ -5537,8 +5613,8 @@ function dataBuilder(fieldStore, rowDiffset, colIdentifier, sortingDetails, opti if (addUid) { retObj.schema.push({ - name: 'uid', - type: 'identifier' + name: _constants__WEBPACK_IMPORTED_MODULE_3__["ROW_ID"], + type: _enums__WEBPACK_IMPORTED_MODULE_2__["FieldType"].DIMENSION }); } diff --git a/dist/datamodel.js.map b/dist/datamodel.js.map index 687fae0..e629303 100644 --- a/dist/datamodel.js.map +++ b/dist/datamodel.js.map @@ -1 +1 @@ -{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./node_modules/d3-dsv/src/autoType.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/index.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/converter/defaultConverters/autoConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/defaultConverters/index.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/index.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums
/group-by-functions.js","webpack://DataModel/./src/enums/index.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/index.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/index.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/natural-join.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://
DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/domain-generator.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/utils/index.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/value.js"],"names":["autoType","object","key","value","trim","number","NaN","isNaN","test","Date","csv","dsv","csvParse","parse","csvParseRows","parseRows","csvFormat","format","csvFormatBody","formatBody","csvFormatRows","formatRows","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","Function","map","name","i","JSON","stringify","join","customConverter","f","row","inferColumns","rows","columnSet","Object","create","forEach","column","push","pad","width","s","length","Array","formatYear","year","formatDate","date","hours","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","RegExp","DELIMITER","charCodeAt","text","convert","N","I","n","t","eof","eol","token","j","c","slice","replace","preformatBody","formatValue","concat","formatRow","tsv","tsvParse","tsvParseRows","tsvFormat","tsvFormatBody","tsvFormatRows","PROPAGATION","ROW_ID","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","AND","OR","DataConverterStore","store","Map","converters","_getDefaultConverters","DSVStringConverter","DSVArrayConverter","JSONConverter","AutoDataConverter","set","converter","type","DataConverter","delete","has","get","converterStore","getStore","DataFormat","AU
TO","data","schema","options","DSV_ARR","DSVArr","DSV_STR","DSVStr","FLAT_JSON","FlatJSON","_type","Error","Auto","dataFormat","detectDataFormat","arr","isArray","defaultOption","firstRowHeader","schemaFields","unitSchema","assign","columnMajor","headers","splice","headerMap","reduce","acc","h","fields","field","schemaField","headIndex","str","fieldSeparator","d3Dsv","header","insertionIndex","schemaFieldsName","item","keys","DataModel","args","_onPropagation","defOptions","order","formatter","withUid","getAllFields","sort","getPartialFieldspace","dataGenerated","dataBuilder","call","_rowDiffset","d","_colIdentifier","columnWise","addUid","uids","fieldNames","e","fmtFieldNames","fmtFieldIdx","next","idx","indexOf","elem","fIdx","fmtFn","datum","datumIdx","undefined","rowDiffset","ids","diffSets","split","Number","start","end","fill","_","fieldsArr","reducers","config","saveChild","groupByString","params","newDataModel","groupBy","persistDerivations","defaultReducer","reducerStore","setParent","sortingDetails","rawData","getData","dataInCSVArr","sortedDm","constructor","_dataFormat","getFieldspace","colData","formattedData","rowsCount","serializedData","rowIdx","colIdx","fieldName","partialFieldspace","_partialFieldspace","cachedValueObjects","_cachedValueObjects","partialField","fieldsObj","obj","Value","fieldIndex","findIndex","fieldinst","_cachedFieldsObj","_cachedDimension","_cachedMeasure","__calculateFieldspace","calculateFieldsConfig","dependency","sanitizeUnitSchema","replaceVar","fieldsConfig","getFieldsConfig","depVars","retrieveFn","depFieldIndices","fieldSpec","index","clone","fs","suppliedFields","cachedStore","cloneProvider","detachedRoot","computedValues","rowDiffsetIterator","fieldsData","createFields","addField","identifiers","addToNameSpace","propConfig","isMutableAction","propagationSourceId","sourceId","payload","rootModel","getRootDataModel","propagationNameSpace","_propagationNameSpace","rootGroupByModel","getRootGroupByModel","rootModels","grou
pByModel","model","addToPropNamespace","propagateToAllDataModels","propagateImmutableActions","eventName","callback","propModel","propListeners","fn","measureFieldName","binFieldName","measureField","createBinnedFieldData","binnedData","bins","binField","FieldType","DIMENSION","subtype","DimensionSubtype","BINNED","serialize","getSchema","dimensionArr","reducerFn","defConfig","mode","FilteringMode","NORMAL","splitWithSelect","uniqueFields","commonFields","fieldConfig","allFields","normalizedProjFieldSets","fieldSet","getNormalizedProFields","splitWithProject","InvalidAwareTypes","invalidAwareVals","fieldRegistry","Relation","CATEGORICAL","TEMPORAL","MEASURE","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","MeasureSubtype","CONTINUOUS","Operators","compose","bin","select","project","calculateVariable","crossProduct","difference","naturalJoin","leftOuterJoin","rightOuterJoin","fullOuterJoin","union","version","pkg","Stats","DateTimeFormatter","FieldsUtility","enums","createUnitField","BUILDER","build","createUnitFieldFromPartial","dataColumn","headersObj","fieldStore","createNamespace","fieldArr","dataId","getUniqueId","getMeasure","measureFields","getDimension","dimensionFields","Binned","binsArr","BinnedParser","Dimension","Categorical","hash","Set","domain","add","CategoricalParser","Continuous","min","POSITIVE_INFINITY","max","NEGATIVE_INFINITY","ContinuousParser","Measure","_cachedDomain","calculateDataDomain","Field","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","Temporal","description","displayName","builder","_params","_context","PartialField","parser","unit","defAggFn","defaultReducerName","numberFormat","formatNumber","val","regex","String","result","isInvalid","matched","match","parseFloat","NA","getInvalidType","FieldParser","parsedVal","TemporalParser","_dtf","nativeDate","getNativeDate","getTime","_sanitize","_cachedMinDiff","sortedData","filter","a","b","arrLn","minDiff
","prevDatum","nextDatum","processedCount","Math","isFinite","parsedDatum","formatAs","prepareSelectionData","resp","entries","prepareJoinData","formattedValue","rawValue","updateFields","fieldStoreName","colIdentifier","collID","partialFieldMap","newFields","coll","persistCurrentDerivation","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastInsertedValue","li","selectRowDiffsetIterator","checker","lastInsertedValueSel","lastInsertedValueRej","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","fieldStoreObj","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","selectorHelperFn","cloneWithAllFields","getKey","len","keyFn","rowId","internalValue","boundsChecker","domainArr","some","dom","isWithinDomain","fieldType","filterPropagationModel","propModels","fns","filterByMeasure","clonedModel","modelFieldsConfig","range","values","dLen","valuesMap","rangeKeys","present","every","def","filteredModel","clonedDMs","cloned","derivation","derivationFormula","addDiffsetToClonedDm","selectConfig","cloneWithSelect","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithProject","projField","projectionSet","actualProjField","projFieldSet","projFields","extend2","validateUnitSchema","sanitizeAndValidateSchema","resolveFieldName","dataHeader","fieldNameAs","as","updateData","relation","defaultConfig","nameSpace","valueObjects","rawFieldsData","formattedFieldsData","fieldInSchema","getDerivationArguments","applyExistingOperationOnModel","dataModel","derivations","getDerivations","selectionModel","getFilteredModel","path","propagateIdentifiers","propModelInf","nonTraversingModel","excludeModels","propagate","handlePropagation","children","_children","child","_parent","find","getPathToRootModel","propagationInf","propagateToSo
urce","propagateInterpolatedValues","filterFn","entry","criterias","persistent","actionCriterias","mutableActions","filteredCriteria","action","sourceActionCriterias","actionInf","actionConf","applyOnSource","models","sourceIdentifiers","inf","propagationModel","reverse","immutableActions","filterImmutableAction","criteriaModel","sourceNamespace","normalizedProjField","search","from","getNumberFormattedVal","require","module","exports","default","_invalidAwareValsMap","_value","NULL","NIL","invalid","nil","null","generateBuckets","binSize","buckets","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","binsCount","dMin","dMax","ceil","abs","unshift","dm","operations","currentDM","firstChild","dispose","defaultFilterFn","dm1","dm2","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","getCommonSchema","tmpSchema","rowAdded","rowPosition","ii","tuple","userArg","cloneProvider1","cloneProvider2","dm1Fields","dm2Fields","tupleObj","cellVal","iii","retObj","reqSorting","tmpDataArr","colIArr","colName","insertInd","sortData","tmpData","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","isArrEqual","prepareDataHelper","addData","hashData","schemaName","fs1","fs2","retArr","fs1Arr","getFilteredValues","sum","filteredNumber","totalSum","curr","avg","filteredValues","first","last","count","variance","mean","num","std","sqrt","fnList","getFieldArr","dimensions","getReducerObj","measures","defReducer","measureName","resolve","existingDataModel","sFieldArr","reducerObj","dbName","measureArr","hashMap","rowCount","defSortFn","a1","b1","merge","lo","mid","hi","sortFn","mainArr","auxArr","mergeSort","naturalJoinFilter","commonSchemaArr","retainTuple","dataModel1","dataModel2","rowDiffArr","diffStr","diffStsArr","getSortFn","dataType","sortType","retFunc","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","toLowerCase","groupData","groupedDa
ta","fieldVal","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","arg","label","applyStandardSort","sortMeta","isCallable","sortingFn","m","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","applyGroupSort","detail","sortedGroupMap","nextMap","shift","dataObj","sDetial","groupSortingIdx","standardSortingDetails","groupSortingDetails","pop","source","_fieldStoreName","_fieldspace","joinWith","unionWith","differenceWith","projectionClone","rejectionClone","_fieldConfig","fieldObj","removeChild","sibling","parent","sd","convertToNativeDate","escape","dtParams","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","parseInt","defaultRangeParser","l","nVal","getTokenDefinitions","daysDef","short","long","monthsDef","definitions","H","extract","getHours","toString","p","P","M","mins","getMinutes","S","getSeconds","K","ms","getMilliseconds","day","getDay","A","getDate","month","getMonth","B","y","substring","presentDate","presentYear","trunc","getFullYear","Y","getTokenFormalNames","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","occurrence","forwardChar","nDate","formattedStr","formattedVal","prototype","dateTimeStamp","extractTokenValue","dtParamSeq","noBreak","dtParamArr","resolverKey","resolverParams","resolverFn","param","resolvedVal","hasOwnProperty","apply","checkIfOnlyYear","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","occurrenceLength","extractValues","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","obj1","obj2","skipUndef","tgtArr","srcArr","srcVal","tgtVal","cRef",
"isObject","isString","uniqueValues","round","random","arr1","arr2","ReducerStore","reducer","__unregister","defineProperties","enumerable","configurable","writable","_formattedValue","_internalValue"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,kDAA0C,gCAAgC;AAC1E;AACA;;AAEA;AACA;AACA;AACA,gEAAwD,kBAAkB;AAC1E;AACA,yDAAiD,cAAc;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iDAAyC,iCAAiC;AAC1E,wHAAgH,mBAAmB,EAAE;AACrI;AACA;;AAEA;AACA;AACA;AACA,mCAA2B,0BAA0B,EAAE;AACvD,yCAAiC,eAAe;AAChD;AACA;AACA;;AAEA;AACA,8DAAsD,+DAA+D;;AAErH;AACA;;;AAGA;AACA;;;;;;;;;;;;;AClFA;AAAA;AAAe,SAASA,QAAT,CAAkBC,MAAlB,EAA0B;AACvC,OAAK,IAAIC,GAAT,IAAgBD,MAAhB,EAAwB;AACtB,QAAIE,QAAQF,OAAOC,GAAP,EAAYE,IAAZ,EAAZ;AAAA,QAAgCC,MAAhC;AACA,QAAI,CAACF,KAAL,EAAYA,QAAQ,IAAR,CAAZ,KACK,IAAIA,UAAU,MAAd,EAAsBA,QAAQ,IAAR,CAAtB,KACA,IAAIA,UAAU,OAAd,EAAuBA,QAAQ,KAAR,CAAvB,KACA,IAAIA,UAAU,KAAd,EAAqBA,QAAQG,GAAR,CAArB,KACA,IAAI,CAACC,MAAMF,SAAS,CAACF,KAAhB,CAAL,EAA6BA,QAAQE,MAAR,CAA7B,KACA,IAAI,8FAA8FG,IAA9F,CAAmGL,KAAnG,CAAJ,EAA+GA,QAAQ,IAAIM,IAAJ,CAASN,KAAT,CAAR,CAA/G,KACA;AACLF,WAAOC,GAAP,IAAcC,KAAd;AACD;AACD,SAAOF,MAAP;AACD,C;;;;;;;;;;;;ACbD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIS,MAAMC,oDAAGA,CAAC,GAAJ,CAAV;;AAEO,IAAIC,WAAWF,IAAIG,KAAnB;AACA,IAAIC,eAAeJ,IAAIK,SAAvB;AACA,IAAIC,YAAYN,IAAIO,MAApB;AACA,IAAIC,gBAAgBR,IAAIS,UAAxB;AACA,IAAIC,gBAAgBV,IAAIW,UAAxB,C;;;;;;;;;;;;ACRP;AAAA,IAAIC,MAAM,EAAV;AAAA,IACIC,MAAM,EADV;AAAA,IAEIC,QAAQ,EAFZ;AAAA,IAGIC,UAAU,EAHd;AAAA,IAIIC,SAAS,EAJb;;AAMA,SAASC,eAAT,CAAyBC,OAAzB,EAAkC;AAChC,SAAO,IAAIC,QAAJ,CAAa,GAAb,EAAkB,aAAaD,QAAQE,GAAR,CAAY,UAASC,IAAT,EAAeC,CAAf,EAAkB;AAClE,WAAOC,KAAKC,SAAL,CAAeH,IAAf,IAAuB,MAAvB,GAAgCC,CAAhC,GAAoC,GAA3C;AACD,GAFqC,EAEnCG,IAFmC,CAE9B,GAF8B,CAAb,GAEV,GAFR,CAAP;AAGD;;AAED,SAASC,eAAT,CAAyBR,OAAzB,EAAkCS,CAAlC,EAAqC;AACnC,MAAIpC,SAAS0B,gBAAg
BC,OAAhB,CAAb;AACA,SAAO,UAASU,GAAT,EAAcN,CAAd,EAAiB;AACtB,WAAOK,EAAEpC,OAAOqC,GAAP,CAAF,EAAeN,CAAf,EAAkBJ,OAAlB,CAAP;AACD,GAFD;AAGD;;AAED;AACA,SAASW,YAAT,CAAsBC,IAAtB,EAA4B;AAC1B,MAAIC,YAAYC,OAAOC,MAAP,CAAc,IAAd,CAAhB;AAAA,MACIf,UAAU,EADd;;AAGAY,OAAKI,OAAL,CAAa,UAASN,GAAT,EAAc;AACzB,SAAK,IAAIO,MAAT,IAAmBP,GAAnB,EAAwB;AACtB,UAAI,EAAEO,UAAUJ,SAAZ,CAAJ,EAA4B;AAC1Bb,gBAAQkB,IAAR,CAAaL,UAAUI,MAAV,IAAoBA,MAAjC;AACD;AACF;AACF,GAND;;AAQA,SAAOjB,OAAP;AACD;;AAED,SAASmB,GAAT,CAAa5C,KAAb,EAAoB6C,KAApB,EAA2B;AACzB,MAAIC,IAAI9C,QAAQ,EAAhB;AAAA,MAAoB+C,SAASD,EAAEC,MAA/B;AACA,SAAOA,SAASF,KAAT,GAAiB,IAAIG,KAAJ,CAAUH,QAAQE,MAAR,GAAiB,CAA3B,EAA8Bf,IAA9B,CAAmC,CAAnC,IAAwCc,CAAzD,GAA6DA,CAApE;AACD;;AAED,SAASG,UAAT,CAAoBC,IAApB,EAA0B;AACxB,SAAOA,OAAO,CAAP,GAAW,MAAMN,IAAI,CAACM,IAAL,EAAW,CAAX,CAAjB,GACHA,OAAO,IAAP,GAAc,MAAMN,IAAIM,IAAJ,EAAU,CAAV,CAApB,GACAN,IAAIM,IAAJ,EAAU,CAAV,CAFJ;AAGD;;AAED,SAASC,UAAT,CAAoBC,IAApB,EAA0B;AACxB,MAAIC,QAAQD,KAAKE,WAAL,EAAZ;AAAA,MACIC,UAAUH,KAAKI,aAAL,EADd;AAAA,MAEIC,UAAUL,KAAKM,aAAL,EAFd;AAAA,MAGIC,eAAeP,KAAKQ,kBAAL,EAHnB;AAIA,SAAOxD,MAAMgD,IAAN,IAAc,cAAd,GACDH,WAAWG,KAAKS,cAAL,EAAX,EAAkC,CAAlC,IAAuC,GAAvC,GAA6CjB,IAAIQ,KAAKU,WAAL,KAAqB,CAAzB,EAA4B,CAA5B,CAA7C,GAA8E,GAA9E,GAAoFlB,IAAIQ,KAAKW,UAAL,EAAJ,EAAuB,CAAvB,CAApF,IACCJ,eAAe,MAAMf,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAtE,GAA4Eb,IAAIe,YAAJ,EAAkB,CAAlB,CAA5E,GAAmG,GAAlH,GACDF,UAAU,MAAMb,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAhF,GACAF,WAAWF,KAAX,GAAmB,MAAMT,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAAjE,GACA,EAJA,CADN;AAMD;;AAEc,yEAASS,SAAT,EAAoB;AACjC,MAAIC,WAAW,IAAIC,MAAJ,CAAW,QAAQF,SAAR,GAAoB,OAA/B,CAAf;AAAA,MACIG,YAAYH,UAAUI,UAAV,CAAqB,CAArB,CADhB;;AAGA,WAAS1D,KAAT,CAAe2D,IAAf,EAAqBnC,CAArB,EAAwB;AACtB,QAAIoC,OAAJ;AAAA,QAAa7C,OAAb;AAAA,QAAsBY,OAAOzB,UAAUyD,IAAV,EAAgB,UAASlC,GAAT,EAAcN,CAAd
,EAAiB;AAC5D,UAAIyC,OAAJ,EAAa,OAAOA,QAAQnC,GAAR,EAAaN,IAAI,CAAjB,CAAP;AACbJ,gBAAUU,GAAV,EAAemC,UAAUpC,IAAID,gBAAgBE,GAAhB,EAAqBD,CAArB,CAAJ,GAA8BV,gBAAgBW,GAAhB,CAAvD;AACD,KAH4B,CAA7B;AAIAE,SAAKZ,OAAL,GAAeA,WAAW,EAA1B;AACA,WAAOY,IAAP;AACD;;AAED,WAASzB,SAAT,CAAmByD,IAAnB,EAAyBnC,CAAzB,EAA4B;AAC1B,QAAIG,OAAO,EAAX;AAAA,QAAe;AACXkC,QAAIF,KAAKtB,MADb;AAAA,QAEIyB,IAAI,CAFR;AAAA,QAEW;AACPC,QAAI,CAHR;AAAA,QAGW;AACPC,KAJJ;AAAA,QAIO;AACHC,UAAMJ,KAAK,CALf;AAAA,QAKkB;AACdK,UAAM,KANV,CAD0B,CAOT;;AAEjB;AACA,QAAIP,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BjD,OAA/B,EAAwC,EAAEiD,CAAF;AACxC,QAAIF,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BhD,MAA/B,EAAuC,EAAEgD,CAAF;;AAEvC,aAASM,KAAT,GAAiB;AACf,UAAIF,GAAJ,EAAS,OAAOvD,GAAP;AACT,UAAIwD,GAAJ,EAAS,OAAOA,MAAM,KAAN,EAAazD,GAApB;;AAET;AACA,UAAIU,CAAJ;AAAA,UAAOiD,IAAIN,CAAX;AAAA,UAAcO,CAAd;AACA,UAAIV,KAAKD,UAAL,CAAgBU,CAAhB,MAAuBzD,KAA3B,EAAkC;AAChC,eAAOmD,MAAMD,CAAN,IAAWF,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBnD,KAAlC,IAA2CgD,KAAKD,UAAL,CAAgB,EAAEI,CAAlB,MAAyBnD,KAA3E;AACA,YAAI,CAACQ,IAAI2C,CAAL,KAAWD,CAAf,EAAkBI,MAAM,IAAN,CAAlB,KACK,IAAI,CAACI,IAAIV,KAAKD,UAAL,CAAgBI,GAAhB,CAAL,MAA+BlD,OAAnC,EAA4CsD,MAAM,IAAN,CAA5C,KACA,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM;AAC/E,eAAOH,KAAKW,KAAL,CAAWF,IAAI,CAAf,EAAkBjD,IAAI,CAAtB,EAAyBoD,OAAzB,CAAiC,KAAjC,EAAwC,IAAxC,CAAP;AACD;;AAED;AACA,aAAOT,IAAID,CAAX,EAAc;AACZ,YAAI,CAACQ,IAAIV,KAAKD,UAAL,CAAgBvC,IAAI2C,GAApB,CAAL,MAAmClD,OAAvC,EAAgDsD,MAAM,IAAN,CAAhD,KACK,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM,SAA1E,MACA,IAAIO,MAAMZ,SAAV,EAAqB;AAC1B,eAAOE,KAAKW,KAAL,CAAWF,CAAX,EAAcjD,CAAd,CAAP;AACD;;AAED;AACA,aAAO8C,MAAM,IAAN,EAAYN,KAAKW,KAAL,CAAWF,CAAX,EAAcP,CAAd,CAAnB;AACD;;AAED,WAAO,CAACG,IAAIG,OAAL,MAAkBzD,GAAzB,EAA8B;AAC5B,UAAIe,MAAM,EAAV;AACA,aAAOuC,MAAMvD,GAAN,IAAauD,MAAMtD,GAA1B;AAA+Be,YAAIQ,IAAJ,CAAS+B,CAAT,GAAaA,IAAIG,OAAjB;AAA/B,OACA,IAAI3C,KAAK,CAACC,MAAMD,EAAEC,GAAF,EAAOsC,GAAP,CAAP,
KAAuB,IAAhC,EAAsC;AACtCpC,WAAKM,IAAL,CAAUR,GAAV;AACD;;AAED,WAAOE,IAAP;AACD;;AAED,WAAS6C,aAAT,CAAuB7C,IAAvB,EAA6BZ,OAA7B,EAAsC;AACpC,WAAOY,KAAKV,GAAL,CAAS,UAASQ,GAAT,EAAc;AAC5B,aAAOV,QAAQE,GAAR,CAAY,UAASe,MAAT,EAAiB;AAClC,eAAOyC,YAAYhD,IAAIO,MAAJ,CAAZ,CAAP;AACD,OAFM,EAEJV,IAFI,CAECgC,SAFD,CAAP;AAGD,KAJM,CAAP;AAKD;;AAED,WAASlD,MAAT,CAAgBuB,IAAhB,EAAsBZ,OAAtB,EAA+B;AAC7B,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO,CAACZ,QAAQE,GAAR,CAAYwD,WAAZ,EAAyBnD,IAAzB,CAA8BgC,SAA9B,CAAD,EAA2CoB,MAA3C,CAAkDF,cAAc7C,IAAd,EAAoBZ,OAApB,CAAlD,EAAgFO,IAAhF,CAAqF,IAArF,CAAP;AACD;;AAED,WAAShB,UAAT,CAAoBqB,IAApB,EAA0BZ,OAA1B,EAAmC;AACjC,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO6C,cAAc7C,IAAd,EAAoBZ,OAApB,EAA6BO,IAA7B,CAAkC,IAAlC,CAAP;AACD;;AAED,WAASd,UAAT,CAAoBmB,IAApB,EAA0B;AACxB,WAAOA,KAAKV,GAAL,CAAS0D,SAAT,EAAoBrD,IAApB,CAAyB,IAAzB,CAAP;AACD;;AAED,WAASqD,SAAT,CAAmBlD,GAAnB,EAAwB;AACtB,WAAOA,IAAIR,GAAJ,CAAQwD,WAAR,EAAqBnD,IAArB,CAA0BgC,SAA1B,CAAP;AACD;;AAED,WAASmB,WAAT,CAAqBnF,KAArB,EAA4B;AAC1B,WAAOA,SAAS,IAAT,GAAgB,EAAhB,GACDA,iBAAiBM,IAAjB,GAAwB6C,WAAWnD,KAAX,CAAxB,GACAiE,SAAS5D,IAAT,CAAcL,SAAS,EAAvB,IAA6B,OAAOA,MAAMiF,OAAN,CAAc,IAAd,EAAoB,MAApB,CAAP,GAAqC,IAAlE,GACAjF,KAHN;AAID;;AAED,SAAO;AACLU,WAAOA,KADF;AAELE,eAAWA,SAFN;AAGLE,YAAQA,MAHH;AAILE,gBAAYA,UAJP;AAKLE,gBAAYA;AALP,GAAP;AAOD,C;;;;;;;;;;;;ACjKD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;ACFA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIoE,MAAM9E,oDAAGA,CAAC,IAAJ,CAAV;;AAEO,IAAI+E,WAAWD,IAAI5E,KAAnB;AACA,IAAI8E,eAAeF,IAAI1E,SAAvB;AACA,IAAI6E,YAAYH,IAAIxE,MAApB;AACA,IAAI4E,gBAAgBJ,IAAItE,UAAxB;AACA,IAAI2E,gBAAgBL,IAAIpE,UAAxB,C;;;;;;;;;;;;;;;;;;;;;;;ACRP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;AAGO,IAAM0E,cAAc,aAApB;;AAEP;;;AAGO,IAAMC,SAAS,QAAf;;AAEP;;;AAGO,IAAMC,iBAAiB;AAC1BC,YAAQ,QADkB;AAE1BC,aAAS,SAFiB;AAG1BC,aAAS,OAHiB;AAI1BC,aAAS,SAJiB;AAK1BC,aAAS,oBALiB;
AAM1BC,SAAK,KANqB;AAO1BC,UAAM;AAPoB,CAAvB;;AAUA,IAAMC,QAAQ;AACjBC,WAAO,OADU;AAEjBC,eAAW,WAFM;AAGjBC,gBAAY,YAHK;AAIjBC,aAAS,SAJQ;AAKjBC,eAAW;AALM,CAAd;;AAQA,IAAMC,oBAAoB;AAC7BC,SAAK,KADwB;AAE7BC,QAAI;AAFyB,CAA1B,C;;;;;;;;;;;;;;;;;;;AChCP;AACA;;IAEMC,kB;AACF,kCAAc;AAAA;;AACV,aAAKC,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKC,UAAL,CAAgB,KAAKC,qBAAL,EAAhB;AACH;;;;gDAEuB;AACpB,mBAAO,CACH,IAAIC,qEAAJ,EADG,EAEH,IAAIC,oEAAJ,EAFG,EAGH,IAAIC,gEAAJ,EAHG,EAIH,IAAIC,oEAAJ,EAJG,CAAP;AAMH;;AAED;;;;;;;;qCAK4B;AAAA;;AAAA,gBAAjBL,WAAiB,uEAAJ,EAAI;;AACxBA,wBAAWzE,OAAX,CAAmB;AAAA,uBAAa,MAAKuE,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B,CAAb;AAAA,aAAnB;AACA,mBAAO,KAAKT,KAAZ;AACH;;AAED;;;;;;;;iCAKSS,S,EAAW;AAChB,gBAAIA,qBAAqBE,4DAAzB,EAAwC;AACpC,qBAAKX,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B;AACA,uBAAO,IAAP;AACH;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;mCAMWA,S,EAAW;AAClB,iBAAKT,KAAL,CAAWY,MAAX,CAAkBH,UAAUC,IAA5B;AACA,mBAAO,IAAP;AACH;;;4BAEG9F,I,EAAM;AACN,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,uBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;AACD,mBAAO,IAAP;AACH;;;;;;AAIL,IAAMmG,iBAAkB,YAAY;AAChC,QAAIf,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAID,kBAAJ,EAAR;AACA,eAAOC,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARuB,EAAxB;;AAUeD,6EAAf,E;;;;;;;;;;;;;;;;;;;;;;;;ACvEA;AACA;AACA;;IAEqBR,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJU,0DAAUA,CAACC,IADP;AAEb;;;;gCAEOC,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOH,oEAAIA,CAACC,IAAL,EAAWC,MAAX,EAAmBC,OAAnB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BJ,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBF,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJY,0DAAUA,CAACK,OADP;AAEb;;;;gCAEOH,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOE,8DAAMA,CAACJ,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BN,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBD,kB;;;AACjB,kCAAc;AAAA;;AAAA,uIACJa,0DAAUA,CAACO,OADP;AAEb;;;;gCAEOL,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOI,8DAAMA,CAACN,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP2CV,4D;;AAA3BP,iF;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;A
AAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACFA;AACA;AACA;;IAEqBE,a;;;AACjB,6BAAc;AAAA;;AAAA,6HACJW,0DAAUA,CAACS,SADP;AAEb;;;;gCAEOP,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOM,gEAAQA,CAACR,IAAT,EAAeC,MAAf,EAAuBC,OAAvB,CAAP;AACH;;;;EAPsCV,4D;;AAAtBL,4E;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;;;;;;;;;;;;;;;;;ACDA;;;IAGqBK,a;AACjB,2BAAYD,IAAZ,EAAkB;AAAA;;AACd,aAAKkB,KAAL,GAAalB,IAAb;AACH;;;;kCAMS;AACN,kBAAM,IAAImB,KAAJ,CAAU,iCAAV,CAAN;AACH;;;4BANU;AACP,mBAAO,KAAKD,KAAZ;AACH;;;;;;AAPgBjB,4E;;;;;;;;;;;;ACHrB;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAASmB,IAAT,CAAeX,IAAf,EAAqBC,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAMnB,aAAa,EAAEyB,4DAAF,EAAYF,wDAAZ,EAAoBF,wDAApB,EAAnB;AACA,QAAMQ,aAAaC,+DAAgBA,CAACb,IAAjB,CAAnB;;AAEA,QAAI,CAACY,UAAL,EAAiB;AACb,cAAM,IAAIF,KAAJ,CAAU,kCAAV,CAAN;AACH;;AAED,WAAO3B,WAAW6B,UAAX,EAAuBZ,IAAvB,EAA6BC,MAA7B,EAAqCC,OAArC,CAAP;AACH;;AAEcS,mEAAf,E;;;;;;;;;;;;;;;;ACvBA;;AAEA;;;;;;;;;;;;;;;;;AAiBA,SAASP,MAAT,CAAgBU,GAAhB,EAAqBb,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAI,CAACrF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;AACD,QAAMM,gBAAgB;AAClBC,wBAAgB;AADE,KAAtB;AAGA,QAAMC,eAAejB,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAArB;AACAyG,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM5G,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;;AAEA,QAAIgI,UAAUJ,YAAd;AACA,QAAIhB,QAAQe,cAAZ,EAA4B;AACxB;AACA;AACAK,kBAAUR,IAAIS,MAAJ,CAAW,CAAX,EAAc,CAAd,EAAiB,CAAjB,CAAV;AACH;AACD;AACA,QAAMC,YAAYF,QAAQG,MAAR,CAAe,UAACC,GAAD,EAAMC,CAAN,EAASjI,CAAT;AAAA,eAC7BU,OAAOgH,MAAP,CAAcM,GAAd,sBAAsBC,CAAtB,EAA0BjI,CAA1B,EAD6B;AAAA,KAAf,EAEf,EAFe,CAAlB;;AAIAoH,QAAIxG,OAAJ,CAAY,UAACsH,MAAD,EAAY;AACpB,YAAMC,QAAQ,EAAd;AACAX,qBAAa5G,OAAb,CAAqB,UAACwH,WAAD,EAAiB;AAClC,gBAAMC,YAAYP,UAAUM,WAAV,CAAlB;AACAD,kBAAMrH,IAAN,CAAWoH,OAAOG,SAAP,CAAX;AACH,SAHD;AAIA,eAAOvH,sBAAQqH,KAAR,CAAP;AACH,KAPD;AAQA,WAAO,CAACX,YAAD,EAAe5H,OAAf,CAAP;AACH;;AAEc8G,qEAAf,E;;;;;;;;;;;;ACtDA;AAAA;AAAA;AAAA;AACA;;AAEA;;;;;;
;;;;;;;;;;;;;;;AAqBA,SAASE,MAAT,CAAiB0B,GAAjB,EAAsB/B,MAAtB,EAA8BC,OAA9B,EAAuC;AACnC,QAAMc,gBAAgB;AAClBC,wBAAgB,IADE;AAElBgB,wBAAgB;AAFE,KAAtB;AAIA/B,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM7H,MAAM6J,wDAAKA,CAAChC,QAAQ+B,cAAd,CAAZ;AACA,WAAO7B,wDAAMA,CAAC/H,IAAII,SAAJ,CAAcuJ,GAAd,CAAP,EAA2B/B,MAA3B,EAAmCC,OAAnC,CAAP;AACH;;AAEcI,qEAAf,E;;;;;;;;;;;;ACnCA;AAAA;AAAA;;AAEA;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BA,SAASE,QAAT,CAAmBM,GAAnB,EAAwBb,MAAxB,EAAgC;AAC5B,QAAI,CAACpF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;;AAED,QAAMyB,SAAS,EAAf;AACA,QAAIzI,IAAI,CAAR;AACA,QAAI0I,uBAAJ;AACA,QAAM9I,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;AACA,QAAM+I,mBAAmBpC,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAAzB;;AAEAqH,QAAIxG,OAAJ,CAAY,UAACgI,IAAD,EAAU;AAClB,YAAMV,SAAS,EAAf;AACAS,yBAAiB/H,OAAjB,CAAyB,UAAC6G,UAAD,EAAgB;AACrC,gBAAIA,cAAcgB,MAAlB,EAA0B;AACtBC,iCAAiBD,OAAOhB,UAAP,CAAjB;AACH,aAFD,MAEO;AACHgB,uBAAOhB,UAAP,IAAqBzH,GAArB;AACA0I,iCAAiB1I,IAAI,CAArB;AACH;AACDkI,mBAAOQ,cAAP,IAAyBE,KAAKnB,UAAL,CAAzB;AACH,SARD;AASA3G,8BAAQoH,MAAR;AACH,KAZD;;AAcA,WAAO,CAACxH,OAAOmI,IAAP,CAAYJ,MAAZ,CAAD,EAAsB7I,OAAtB,CAAP;AACH;;AAEckH,uEAAf,E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACzDA;;AAEA;AACA;AAYA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;;;;;;IAYMgC,S;;;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,yBAAsB;AAAA;;AAAA;;AAAA,0CAANC,IAAM;AAANA,gBAAM;AAAA;;AAAA,qJACTA,IADS;;AAGlB,cAAKC,cAAL,GAAsB,EAAtB;AAHkB;AAIrB;;AAED;;;;;;;;;;;;;;;AA0CA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAgCSxC,O,EAAS;AACd,gBAAMyC,aAAa;AACfC,uBAAO,KADQ;AAEfC,2BAAW,IAFI;AAGfC,yBAAS,KAHM;AAIfC,8BAAc,KAJC;AAKfC,sBAAM;AALS,aAAnB;AAOA9C,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;AACA,gBAAM0B,SAAS,KAAKqB,oBAAL,GAA4BrB,MAA3C;;AAEA,gBAAMsB,gBAAgBC,qDAAWA,CAACC,IAAZ,CAClB,IADkB,EAElB,KAAKH,oBAAL,GAA4BrB,MAFV,EAGlB,KAAKyB,WAHa,EAIlBnD,QAAQ6C,YAAR,GAAuBnB,OAAOpI,GAAP,CAAW;AAAA,uBAAK8J,EAAE7J,IAAF,EAAL;AAAA,aAAX,EAA0BI,IAA1B,EAAvB,GAA0D
,KAAK0J,cAJ7C,EAKlBrD,QAAQ8C,IALU,EAMlB;AACIQ,4BAAYtD,QAAQ0C,KAAR,KAAkB,QADlC;AAEIa,wBAAQ,CAAC,CAACvD,QAAQ4C;AAFtB,aANkB,CAAtB;;AAYA,gBAAI,CAAC5C,QAAQ2C,SAAb,EAAwB;AACpB,uBAAOK,aAAP;AACH;;AAzBa,2BA2BQhD,OA3BR;AAAA,gBA2BN2C,SA3BM,YA2BNA,SA3BM;AAAA,gBA4BN7C,IA5BM,GA4BiBkD,aA5BjB,CA4BNlD,IA5BM;AAAA,gBA4BAC,MA5BA,GA4BiBiD,aA5BjB,CA4BAjD,MA5BA;AAAA,gBA4BQyD,IA5BR,GA4BiBR,aA5BjB,CA4BQQ,IA5BR;;AA6Bd,gBAAMC,aAAa1D,OAAOzG,GAAP,CAAY;AAAA,uBAAKoK,EAAEnK,IAAP;AAAA,aAAZ,CAAnB;AACA,gBAAMoK,gBAAgBzJ,OAAOmI,IAAP,CAAYM,SAAZ,CAAtB;AACA,gBAAMiB,cAAcD,cAAcpC,MAAd,CAAqB,UAACC,GAAD,EAAMqC,IAAN,EAAe;AACpD,oBAAMC,MAAML,WAAWM,OAAX,CAAmBF,IAAnB,CAAZ;AACA,oBAAIC,QAAQ,CAAC,CAAb,EAAgB;AACZtC,wBAAIlH,IAAJ,CAAS,CAACwJ,GAAD,EAAMnB,UAAUkB,IAAV,CAAN,CAAT;AACH;AACD,uBAAOrC,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;;AAQA,gBAAIxB,QAAQ0C,KAAR,KAAkB,QAAtB,EAAgC;AAC5BkB,4BAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,wBAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,wBAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAlE,yBAAKmE,IAAL,EAAW7J,OAAX,CAAmB,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AACpCtE,6BAAKmE,IAAL,EAAWG,QAAX,IAAuBF,MAAMhB,IAAN,CACnBmB,SADmB,EAEnBF,KAFmB,EAGnBX,KAAKY,QAAL,CAHmB,EAInBrE,OAAOkE,IAAP,CAJmB,CAAvB;AAMH,qBAPD;AAQH,iBAZD;AAaH,aAdD,MAcO;AACHnE,qBAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AAC9BR,gCAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,4BAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,4BAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAG,8BAAMF,IAAN,IAAcC,MAAMhB,IAAN,CACVmB,SADU,EAEVF,MAAMF,IAAN,CAFU,EAGVT,KAAKY,QAAL,CAHU,EAIVrE,OAAOkE,IAAP,CAJU,CAAd;AAMH,qBAVD;AAWH,iBAZD;AAaH;;AAED,mBAAOjB,aAAP;AACH;;AAED;;;;;;;;kCAKW;AACP,gBAAMsB,aAAa,KAAKnB,WAAxB;AACA,gBAAMoB,MAAM,EAAZ;;AAEA,gBAAID,WAAW5J,MAAf,EAAuB;AACnB,oBAAM8J,WAAWF,WAAWG,KAAX,CAAiB,GAAjB,CAAjB;;AAEAD,yBAASpK,OAAT,CAAiB,UAAC+E,GAAD,EAAS;AAAA,yCACHA,IAAIsF,KAAJ,CAAU,GAAV,EAAenL,GAAf,CAAmBoL,MAAnB,CADG;AAAA;AAAA,wBACjBC,KADiB;AAAA,wBACVC,GADU;;AAGtBA,0BAAMA,QAAQP,SAAR,GAAoBO,GAApB,GAA0BD,KAAhC;AACAJ,wBAAIjK,IAAJ,+BAAYK,MAAMiK,MAAMD,KAAN,GAAc,CAApB,EAAuBE,IAAvB,GAA8BvL,GAA9B,CAAkC,UAACwL,CAAD,EAAIhB,GAAJ;AAAA,+BAAYa,QAAQb,GA
ApB;AAAA,qBAAlC,CAAZ;AACH,iBALD;AAMH;;AAED,mBAAOS,GAAP;AACH;AACD;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAwBSQ,S,EAAwD;AAAA,gBAA7CC,QAA6C,uEAAlC,EAAkC;AAAA,gBAA9BC,MAA8B,uEAArB,EAAEC,WAAW,IAAb,EAAqB;;AAC7D,gBAAMC,qBAAmBJ,UAAUpL,IAAV,EAAzB;AACA,gBAAIyL,SAAS,CAAC,IAAD,EAAOL,SAAP,EAAkBC,QAAlB,CAAb;AACA,gBAAMK,eAAeC,mEAAWF,MAAX,CAArB;;AAEAG,8EAAkBA,CACd,IADJ,EAEIF,YAFJ,EAGI5H,yDAAcA,CAACG,OAHnB,EAII,EAAEmH,oBAAF,EAAaI,4BAAb,EAA4BK,gBAAgBC,4DAAYA,CAACD,cAAb,EAA5C,EAJJ,EAKIR,QALJ;;AAQA,gBAAIC,OAAOC,SAAX,EAAsB;AAClBG,6BAAaK,SAAb,CAAuB,IAAvB;AACH,aAFD,MAEO;AACHL,6BAAaK,SAAb,CAAuB,IAAvB;AACH;;AAED,mBAAOL,YAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BAmDMM,c,EAA+C;AAAA,gBAA/BV,MAA+B,uEAAtB,EAAEC,WAAW,KAAb,EAAsB;;AACjD,gBAAMU,UAAU,KAAKC,OAAL,CAAa;AACzBnD,uBAAO,KADkB;AAEzBI,sBAAM6C;AAFmB,aAAb,CAAhB;AAIA,gBAAM1D,SAAS2D,QAAQ7F,MAAR,CAAezG,GAAf,CAAmB;AAAA,uBAASqI,MAAMpI,IAAf;AAAA,aAAnB,CAAf;AACA,gBAAMuM,eAAe,CAAC7D,MAAD,EAASlF,MAAT,CAAgB6I,QAAQ9F,IAAxB,CAArB;;AAEA,gBAAMiG,WAAW,IAAI,KAAKC,WAAT,CAAqBF,YAArB,EAAmCF,QAAQ7F,MAA3C,EAAmD,EAAEW,YAAY,QAAd,EAAnD,CAAjB;;AAEA6E,8EAAkBA,CACd,IADJ,EAEIQ,QAFJ,EAGItI,yDAAcA,CAACO,IAHnB,EAIIiH,MAJJ,EAKIU,cALJ;;AAQA,gBAAIV,OAAOC,SAAX,EAAsB;AAClBa,yBAASL,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHK,yBAASL,SAAT,CAAmB,IAAnB;AACH;;AAED,mBAAOK,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;kCAqBW1G,I,EAAMW,O,EAAS;AACtBX,mBAAOA,QAAQ,KAAK4G,WAApB;AACAjG,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEa,gBAAgB,GAAlB,EAAlB,EAA2C/B,OAA3C,CAAV;;AAEA,gBAAM0B,SAAS,KAAKwE,aAAL,GAAqBxE,MAApC;AACA,gBAAMyE,UAAUzE,OAAOpI,GAAP,CAAW;AAAA,uBAAKO,EAAEuM,aAAF,EAAL;AAAA,aAAX,CAAhB;AACA,gBAAMC,YAAYF,QAAQ,CAAR,EAAWzL,MAA7B;AACA,gBAAI4L,uBAAJ;AACA,gBAAIC,eAAJ;AACA,gBAAIC,eAAJ;;AAEA,gBAAInH,SAASO,iDAAUA,CAACS,SAAxB,EAAmC;AAC/BiG,iCAAiB,EAAjB;AACA,qBAAKC,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,MAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,4BAAI4H,OAAO8E,MAAP,EAAejN,IAAf,EAAJ,IAA6B4M,QAAQK,MAAR,EAAgBD,MAAhB,CAA7B;AACH;AACDD,
mCAAehM,IAAf,CAAoBR,GAApB;AACH;AACJ,aATD,MASO,IAAIuF,SAASO,iDAAUA,CAACO,OAAxB,EAAiC;AACpCmG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,EAA0BI,IAA1B,CAA+BqG,QAAQ+B,cAAvC,CAAD,CAAjB;AACA,qBAAKwE,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,OAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,6BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAAIH,IAAJ,CAASqG,QAAQ+B,cAAjB,CAApB;AACH;AACDuE,iCAAiBA,eAAe3M,IAAf,CAAoB,IAApB,CAAjB;AACH,aAVM,MAUA,IAAI0F,SAASO,iDAAUA,CAACK,OAAxB,EAAiC;AACpCqG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,CAAD,CAAjB;AACA,qBAAKgN,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,QAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,8BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAApB;AACH;AACJ,aATM,MASA;AACH,sBAAM,IAAI0G,KAAJ,gBAAuBnB,IAAvB,uBAAN;AACH;;AAED,mBAAOiH,cAAP;AACH;;;iCAES3E,K,EAAO;AACb,gBAAM8E,YAAY9E,MAAMpI,IAAN,EAAlB;AACA,iBAAK8J,cAAL,UAA2BoD,SAA3B;AACA,gBAAMC,oBAAoB,KAAKC,kBAA/B;AACA,gBAAMC,qBAAqBF,kBAAkBG,mBAA7C;AACA,gBAAMT,gBAAgBzE,MAAMyE,aAAN,EAAtB;AACA,gBAAMR,UAAUjE,MAAMmF,YAAN,CAAmBhH,IAAnC;;AAEA,gBAAI,CAAC4G,kBAAkBK,SAAlB,GAA8BpF,MAAMpI,IAAN,EAA9B,CAAL,EAAkD;AAC9CmN,kCAAkBhF,MAAlB,CAAyBpH,IAAzB,CAA8BqH,KAA9B;AACAiF,mCAAmBxM,OAAnB,CAA2B,UAAC4M,GAAD,EAAMxN,CAAN,EAAY;AACnCwN,wBAAIrF,MAAMpI,IAAN,EAAJ,IAAoB,IAAI0N,8CAAJ,CAAUb,cAAc5M,CAAd,CAAV,EAA4BoM,QAAQpM,CAAR,CAA5B,EAAwCmI,KAAxC,CAApB;AACH,iBAFD;AAGH,aALD,MAKO;AACH,oBAAMuF,aAAaR,kBAAkBhF,MAAlB,CAAyByF,SAAzB,CAAmC;AAAA,2BAAaC,UAAU7N,IAAV,OAAqBkN,SAAlC;AAAA,iBAAnC,CAAnB;AACAS,8BAAc,CAAd,KAAoBR,kBAAkBhF,MAAlB,CAAyBwF,UAAzB,IAAuCvF,KAA3D;AACH;;AAED;AACA+E,8BAAkBW,gBAAlB,GAAqC,IAArC;AACAX,8BAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,8BAAkBa,cAAlB,GAAmC,IAAnC;;AAEA,iBAAKC,qBAAL,GAA6BC,qBAA7B;AACA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0CAoCmB1H,M,EAAQ2H,U,EAAYzC,M,EAA
Q;AAAA;;AAC3ClF,qBAAS4H,kEAAkBA,CAAC5H,MAAnB,CAAT;AACAkF,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEgE,WAAW,IAAb,EAAmB0C,YAAY,KAA/B,EAAlB,EAA0D3C,MAA1D,CAAT;;AAEA,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;AACA,gBAAMC,UAAUL,WAAW/K,KAAX,CAAiB,CAAjB,EAAoB+K,WAAWhN,MAAX,GAAoB,CAAxC,CAAhB;AACA,gBAAMsN,aAAaN,WAAWA,WAAWhN,MAAX,GAAoB,CAA/B,CAAnB;;AAEA,gBAAImN,aAAa9H,OAAOxG,IAApB,KAA6B,CAAC0L,OAAO2C,UAAzC,EAAqD;AACjD,sBAAM,IAAIpH,KAAJ,CAAaT,OAAOxG,IAApB,wCAAN;AACH;;AAED,gBAAM0O,kBAAkBF,QAAQzO,GAAR,CAAY,UAACqI,KAAD,EAAW;AAC3C,oBAAMuG,YAAYL,aAAalG,KAAb,CAAlB;AACA,oBAAI,CAACuG,SAAL,EAAgB;AACZ;AACA,0BAAM,IAAI1H,KAAJ,CAAamB,KAAb,kCAAN;AACH;AACD,uBAAOuG,UAAUC,KAAjB;AACH,aAPuB,CAAxB;;AASA,gBAAMC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;;AAEA,gBAAMmD,KAAKD,MAAMlC,aAAN,GAAsBxE,MAAjC;AACA,gBAAM4G,iBAAiBL,gBAAgB3O,GAAhB,CAAoB;AAAA,uBAAO+O,GAAGvE,GAAH,CAAP;AAAA,aAApB,CAAvB;;AAEA,gBAAIyE,cAAc,EAAlB;AACA,gBAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,uBAAM,OAAKC,YAAL,EAAN;AAAA,aAApB;;AAEA,gBAAMC,iBAAiB,EAAvB;AACAC,gFAAkBA,CAACP,MAAMjF,WAAzB,EAAsC,UAAC3J,CAAD,EAAO;AACzC,oBAAMoP,aAAaN,eAAehP,GAAf,CAAmB;AAAA,2BAASqI,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAT;AAAA,iBAAnB,CAAnB;AACAkP,+BAAelP,CAAf,IAAoBwO,+CAAcY,UAAd,UAA0BpP,CAA1B,EAA6BgP,aAA7B,EAA4CD,WAA5C,GAApB;AACH,aAHD;;AA9B2C,gCAkC3BM,mEAAYA,CAAC,CAACH,cAAD,CAAb,EAA+B,CAAC3I,MAAD,CAA/B,EAAyC,CAACA,OAAOxG,IAAR,CAAzC,CAlC2B;AAAA;AAAA,gBAkCpCoI,KAlCoC;;AAmC3CyG,kBAAMU,QAAN,CAAenH,KAAf;;AAEA4D,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACK,OAHnB,EAII,EAAEmH,QAAQlF,MAAV,EAAkB2B,QAAQqG,OAA1B,EAJJ,EAKIC,UALJ;;AAQA,mBAAOI,KAAP;AACH;;AAED;;;;;;;;;;;kCAQWW,W,EAA2D;AAAA,gBAA9C9D,MAA8C,uEAArC,EAAqC;AAAA,gBAAjC+D,cAAiC;AAAA,gBAAjBC,UAAiB,uEAAJ,EAAI;;AAClE,gBAAMC,kBAAkBjE,OAAOiE,eAA/B;AACA,gBAAMC,sBAAsBlE,OAAOmE,QAAnC;AACA,gBAAMC,UAAUpE,OAAOoE,OAAvB;AACA,gBAAMC,YAAYC,gEAAgBA,CAAC,IAAjB,CAAlB;AACA,gBAAMC,uBAAuBF,UAAUG,qBAAvC;AACA,gBAAMC,mBAAmBC,mEAAmBA,CAAC,IAApB,CAAzB;AACA,gBAAMC,aAAa;AACfC,8BAAcH,gBADC;AAEfI,uBAAOR;AAFQ,aAAnB;;AAKAN,8BAAkBe,kEAAkBA,CAACP,oBAAnB,EAAyCvE,MAAzC,E
AAiD,IAAjD,CAAlB;AACA+E,oFAAwBA,CAACjB,WAAzB,EAAsCa,UAAtC,EAAkD,EAAEJ,0CAAF,EAAwBJ,UAAUD,mBAAlC,EAAlD,EACIjP,OAAOgH,MAAP,CAAc;AACVmI;AADU,aAAd,EAEGpE,MAFH,CADJ;;AAKA,gBAAIiE,eAAJ,EAAqB;AACjBe,yFAAyBA,CAACT,oBAA1B,EAAgDI,UAAhD,EAA4D;AACxD3E,kCADwD;AAExDgE;AAFwD,iBAA5D,EAGG,IAHH;AAIH;;AAED,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;2BAOIiB,S,EAAWC,Q,EAAU;AACrB,oBAAQD,SAAR;AACA,qBAAK3M,sDAAL;AACI,yBAAKiF,cAAL,CAAoBlI,IAApB,CAAyB6P,QAAzB;AACA;AAHJ;AAKA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;oCAMaD,S,EAAW;AACpB,oBAAQA,SAAR;AACA,qBAAK3M,sDAAL;AACI,yBAAKiF,cAAL,GAAsB,EAAtB;AACA;;AAHJ;AAMA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;0CAOmB4H,S,EAAWf,O,EAAS;AAAA;;AACnC,gBAAIgB,gBAAgB,KAAK7H,cAAzB;AACA6H,0BAAcjQ,OAAd,CAAsB;AAAA,uBAAMkQ,GAAGpH,IAAH,CAAQ,MAAR,EAAckH,SAAd,EAAyBf,OAAzB,CAAN;AAAA,aAAtB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BA2CKkB,gB,EAAkBtF,M,EAAQ;AAC3B,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEA,gBAAI,CAACD,aAAa0C,gBAAb,CAAL,EAAqC;AACjC,sBAAM,IAAI/J,KAAJ,YAAmB+J,gBAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAevF,OAAO1L,IAAP,IAAkBgR,gBAAlB,YAArB;;AAEA,gBAAI1C,aAAa2C,YAAb,CAAJ,EAAgC;AAC5B,sBAAM,IAAIhK,KAAJ,YAAmBgK,YAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAe,KAAKvE,aAAL,GAAqBa,SAArB,GAAiCwD,gBAAjC,CAArB;;AAb2B,wCAcEG,sFAAqBA,CAACD,YAAtB,EAAoC,KAAKtH,WAAzC,EAAsD8B,MAAtD,CAdF;AAAA,gBAcnB0F,UAdmB,yBAcnBA,UAdmB;AAAA,gBAcPC,IAdO,yBAcPA,IAdO;;AAgB3B,gBAAMC,WAAWhC,mEAAYA,CAAC,CAAC8B,UAAD,CAAb,EAA2B,CACxC;AACIpR,sBAAMiR,YADV;AAEInL,sBAAMyL,gDAASA,CAACC,SAFpB;AAGIC,yBAASC,uDAAgBA,CAACC,MAH9B;AAIIN;AAJJ,aADwC,CAA3B,EAMT,CAACJ,YAAD,CANS,EAMO,CANP,CAAjB;;AAQA,gBAAMpC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;AACAkD,kBAAMU,QAAN,CAAe+B,QAAf;;AAEAtF,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACM,GAHnB,EAIK,EAAEwM,kCAAF,EAAoBtF,cAApB,EAA4BuF,0BAA5B,EAJL,EAKK,IALL;;AAQA,mBAAOpC,KAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;uCAuBgB;AACZ,gBAAMtI,OAAO,KAAKqL,SAAL,CAAevL,iDAAUA,CAACS,SAA1B,CAAb;AACA,gBAAMN,SAAS,KAAKqL,SAAL,EAAf;;AAEA,mBAAO,IAAI9I,SAAJ,CAAcxC,IAAd,EAAoBC,MAApB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;mCA0CYsL,Y,EAAcC,S,EAAWrG,M,EAAQ;AACzC,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEAuD,yBAAajR,OAAb,CAAqB,UAACqM,SAAD,EAAe;AAChC,oBAAI,CAACoB,aAAapB,SAAb,CAAL,EAA8B;AAC1B,0BAAM,IAAIjG,KAAJ,YAAmBiG,SAAnB,mCAAN;AACH;AACJ,aAJD;;AAMA,gBAAM8E,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;;AAKAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;;AAEA,mBAAO0G,+DAAeA,CAAC,IAAhB,EAAsBN,YAAtB,EAAoCC,SAApC,EAA+CrG,MAA/C,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;wCAoC6D;AAAA,gBAA9C2G,YAA8C,uEAA/B,EAA+B;AAAA,gBAA3BC,YAA2B,uEAAZ,EAAY;AAAA,gBAAR5G,MAAQ;;AACzD,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIA,gBAAM4G,cAAc,KAAKhE,eAAL,EAApB;AACA,gBAAMiE,YAAY7R,OAAOmI,IAAP,CAAYyJ,WAAZ,CAAlB;AACA,gBAAME,0BAA0B,CAAC,CAACH,YAAD,CAAD,CAAhC;;AAEA5G,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACA2G,2BAAeA,aAAalR,MAAb,GAAsBkR,YAAtB,GAAqC,CAAC,EAAD,CAApD;;AAGAA,yBAAaxR,OAAb,CAAqB,UAAC6R,QAAD,EAAWzS,CAAX,EAAiB;AAClCwS,wCAAwBxS,CAAxB,IAA6B0S,sEAAsBA,8BAC3CD,QADqB,sBACRJ,YADQ,IAEzBE,SAFyB,EAGzBD,WAHyB,CAA7B;AAIH,aALD;;AAOA,mBAAOK,gEAAgBA,CAAC,IAAjB,EAAuBH,uBAAvB,EAAgD/G,MAAhD,EAAwD8G,SAAxD,CAAP;AACH;;;;;AAhvBD;;;;;;;;;;;mDAWmC9G,M,EAAQ;AACvC,mBAAOmH,4DAAiBA,CAACC,gBAAlB,CAAmCpH,MAAnC,CAAP;AACH;;;4BA/BsB;AACnB,mBAAOQ,4DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO/F,0DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO4M,sDAAP;AACH;;;;EApEmBC,iD;;AA2zBTjK,wEAAf,E;;;;;;;;;;;;ACr2BA;AAAA;AAAA;;AAEe;AACX5B,gBAAYd,iDAAUA,CAACC;AADZ,CAAf,E;;;;;;;;;;;;ACFA;AAAA;;;;;;;AAOA,IAAMD,aAAa;AACfS,aAAW,UADI;AAEfF,WAAS,QAFM;AAGfF,WAAS,QAHM;AAIfJ,QAAM;AAJS,CAAnB;;AAOeD,yEAAf,E;;;;;;;;;;;;ACdA;AAAA;;;;;;AAMA,IAAMqL,mBAAmB;AACrBuB,eAAa,aADQ;AAErBC,YAAU,UAFW;AAGrBvB,UAAQ;AAHa,CAAzB;;AAMeD,+EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;;AAOA,IAAMH,YAAY;AACd4B,WAAS,SADK;AAEd3B,aAAW;AAFG,CAAlB;;AAKeD,wEAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMW,gBAAgB;AAClBC,UAAQ,QADU;AAElBiB,WAAS,SAFS;AAGlBC,OAAK;AAHa,CAAtB;;AAMenB,4EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMoB,qBAAqB;AACvBC,SA
AK,KADkB;AAEvBC,SAAK,KAFkB;AAGvBC,SAAK,KAHkB;AAIvBC,SAAK,KAJkB;AAKvBC,WAAO,OALgB;AAMvBC,UAAM,MANiB;AAOvBC,WAAO,OAPgB;AAQvBC,SAAK;AARkB,CAA3B;;AAWeR,iFAAf,E;;;;;;;;;;;;ACjBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;;;;;;;AASA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;ACbA;AAAA;;;;;;AAMA,IAAMS,iBAAiB;AACnBC,cAAY;AADO,CAAvB;;AAIeD,6EAAf,E;;;;;;;;;;;;ACVA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAiBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,IAAME,YAAY;AACdC,8DADc;AAEdC,sDAFc;AAGdC,4DAHc;AAIdC,8DAJc;AAKdtI,8DALc;AAMduI,kFANc;AAOd/K,wDAPc;AAQdgL,wEARc;AASdC,oEATc;AAUdC,sEAVc;AAWdC,0EAXc;AAYdC,4EAZc;AAadC,0EAbc;AAcdC,0DAdc;AAedzF,oFAAkBA;AAfJ,CAAlB;;AAkBA,IAAM0F,UAAUC,0CAAGA,CAACD,OAApB;AACAnU,OAAOgH,MAAP,CAAcoB,kDAAd,EAAyB;AACrBkL,wBADqB;AAErBe,8CAFqB;AAGrB9Q,6EAHqB;AAIrB+Q,+EAJqB;AAKrB5O,qEALqB;AAMrB6L,2EANqB;AAOrBW,mFAPqB;AAQrBiC,oBARqB;AASrB/O,2EATqB;AAUrBmP,uDAAaA;AAVQ,CAAzB,EAWGC,mCAXH;;AAaepM,iHAAf,E;;;;;;;;;;;;AC3DA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;AAEA;;;;;;;AAOA,SAASqM,eAAT,CAAyB7O,IAAzB,EAA+BC,MAA/B,EAAuC;AACnCD,WAAOA,QAAQ,EAAf;;AAEA,QAAIwM,qDAAaA,CAAC9M,GAAd,CAAkBO,OAAOiL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC7M,GAAd,CAAkBM,OAAOiL,OAAzB,EACU4D,OADV,CAEUnI,SAFV,CAEoB1G,OAAOxG,IAF3B,EAGUwG,MAHV,CAGiBA,MAHjB,EAIUD,IAJV,CAIeA,IAJf,EAKUwE,UALV,SAK0BxE,KAAKpF,MAAL,GAAc,CALxC,GAMUmU,KANV,EAAP;AAOH;AACD,WAAOvC,qDAAaA,CACH7M,GADV,CACcM,OAAOV,IAAP,KAAgByL,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGUnI,SAHV,CAGoB1G,OAAOxG,IAH3B,EAIUwG,MAJV,CAIiBA,MAJjB,EAKUD,IALV,CAKeA,IALf,EAMUwE,UANV,SAM0BxE,KAAKpF,MAAL,GAAc,CANxC,GAOUmU,KAPV,EAAP;AAQH;;AAGD;;;;;;;AAOO,SAASC,0BAAT,CAAoChI,YAApC,EAAkDxC,UAAlD,EAA8D;AAAA,QACzDvE,MADyD,GAC9C+G,YAD8C,CACzD/G,MADyD;;;AAGjE,QAAIuM,qDAAaA,CAAC9M,GAAd,CAAkBO,OAAOiL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC7M,GAAd,CAAkBM,OAAOiL,OAAzB,EACU4D,OADV,CAEU9H,YAFV,CAEuBA,YAFvB,EAGUxC,UAHV,CAGqBA,UAHrB,EAIUuK,KAJV,EAAP;AAKH;AACD,WAAOvC,
qDAAaA,CACH7M,GADV,CACcM,OAAOV,IAAP,KAAgByL,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGU9H,YAHV,CAGuBA,YAHvB,EAIUxC,UAJV,CAIqBA,UAJrB,EAKUuK,KALV,EAAP;AAMH;;AAED;;;;;;;;AAQO,SAAShG,YAAT,CAAsBkG,UAAtB,EAAkChP,MAAlC,EAA0CqB,OAA1C,EAAmD;AACtD,QAAM4N,aAAa,EAAnB;;AAEA,QAAI,EAAE5N,WAAWA,QAAQ1G,MAArB,CAAJ,EAAkC;AAC9B0G,kBAAUrB,OAAOzG,GAAP,CAAW;AAAA,mBAAQ8I,KAAK7I,IAAb;AAAA,SAAX,CAAV;AACH;;AAED6H,YAAQhH,OAAR,CAAgB,UAAC6H,MAAD,EAASzI,CAAT,EAAe;AAC3BwV,mBAAW/M,MAAX,IAAqBzI,CAArB;AACH,KAFD;;AAIA,WAAOuG,OAAOzG,GAAP,CAAW;AAAA,eAAQqV,gBAAgBI,WAAWC,WAAW5M,KAAK7I,IAAhB,CAAX,CAAhB,EAAmD6I,IAAnD,CAAR;AAAA,KAAX,CAAP;AACH,C;;;;;;;;;;;;AC9ED;AAAA;AAAA;AAAA;AACA;;AAEA,IAAM6M,aAAa;AACfnP,UAAM,EADS;;AAGfoP,mBAHe,2BAGEC,QAHF,EAGY5V,IAHZ,EAGkB;AAC7B,YAAM6V,SAAS7V,QAAQ8V,0DAAWA,EAAlC;;AAEA,aAAKvP,IAAL,CAAUsP,MAAV,IAAoB;AAChB7V,kBAAM6V,MADU;AAEhB1N,oBAAQyN,QAFQ;;AAIhBpI,qBAJgB,uBAIH;AACT,oBAAIA,YAAY,KAAKM,gBAArB;;AAEA,oBAAI,CAACN,SAAL,EAAgB;AACZA,gCAAY,KAAKM,gBAAL,GAAwB,EAApC;AACA,yBAAK3F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3BoF,kCAAUpF,MAAMpI,IAAN,EAAV,IAA0BoI,KAA1B;AACH,qBAFD;AAGH;AACD,uBAAOoF,SAAP;AACH,aAde;AAehBuI,sBAfgB,wBAeF;AACV,oBAAIC,gBAAgB,KAAKhI,cAAzB;;AAEA,oBAAI,CAACgI,aAAL,EAAoB;AAChBA,oCAAgB,KAAKhI,cAAL,GAAsB,EAAtC;AACA,yBAAK7F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwByL,gDAASA,CAAC4B,OAAtC,EAA+C;AAC3C6C,0CAAc5N,MAAMpI,IAAN,EAAd,IAA8BoI,KAA9B;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO4N,aAAP;AACH,aA3Be;AA4BhBC,wBA5BgB,0BA4BA;AACZ,oBAAIC,kBAAkB,KAAKnI,gBAA3B;;AAEA,oBAAI,CAAC,KAAKA,gBAAV,EAA4B;AACxBmI,sCAAkB,KAAKnI,gBAAL,GAAwB,EAA1C;AACA,yBAAK5F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwByL,gDAASA,CAACC,SAAtC,EAAiD;AAC7C0E,4CAAgB9N,MAAMpI,IAAN,EAAhB,IAAgCoI,KAAhC;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO8N,eAAP;AACH;AAxCe,SAApB;AA0CA,eAAO,KAAK3P,IAAL,CAAUsP,MAAV,CAAP;AACH;AAjDc,CAAnB;;AAoDeH,yEAAf,E;;;;;;;;;;;;;;;;;;;;;;;ACvDA;AACA;;AAEA;;;;;;;;IAOqBS,M;;;;;;;;;;;;AACjB;;;
;;;;8CAOuB;AACnB,gBAAMC,UAAU,KAAK7I,YAAL,CAAkB/G,MAAlB,CAAyB6K,IAAzC;AACA,mBAAO,CAAC+E,QAAQ,CAAR,CAAD,EAAaA,QAAQA,QAAQjV,MAAR,GAAiB,CAAzB,CAAb,CAAP;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKoM,YAAL,CAAkB/G,MAAlB,CAAyB6K,IAAhC;AACH;;;iCAEe;AACZ,mBAAO,IAAIgF,8DAAJ,EAAP;AACH;;;;EAzB+BC,kD;;AAAfH,qE;;;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;AACA;AACA;AACA;;;;;;;;IAOqBI,W;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO7E,uDAAgBA,CAACuB,WAAxB;AACH;;AAED;;;;;;;;;;8CAOuB;AAAA;;AACnB,gBAAMuD,OAAO,IAAIC,GAAJ,EAAb;AACA,gBAAMC,SAAS,EAAf;;AAEA;AACAtH,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI,CAACuW,KAAKvQ,GAAL,CAAS2E,KAAT,CAAL,EAAsB;AAClB4L,yBAAKG,GAAL,CAAS/L,KAAT;AACA8L,2BAAO3V,IAAP,CAAY6J,KAAZ;AACH;AACJ,aAND;AAOA,mBAAO8L,MAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIE,mEAAJ,EAAP;AACH;;;;EApCoCN,kD;;AAApBC,0E;;;;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBM,U;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO9C,qDAAcA,CAACC,UAAtB;AACH;;AAED;;;;;;;;;;8CAOuB;AAAA;;AACnB,gBAAI8C,MAAM3L,OAAO4L,iBAAjB;AACA,gBAAIC,MAAM7L,OAAO8L,iBAAjB;;AAEA;AACA7H,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI2K,iBAAiBiI,4DAArB,EAAwC;AACpC;AACH;;AAED,oBAAIjI,QAAQkM,GAAZ,EAAiB;AACbA,0BAAMlM,KAAN;AACH;AACD,oBAAIA,QAAQoM,GAAZ,EAAiB;AACbA,0BAAMpM,KAAN;AACH;AACJ,aAZD;;AAcA,mBAAO,CAACkM,GAAD,EAAME,GAAN,CAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIE,kEAAJ,EAAP;AACH;;;;EA3CmCC,gD;;AAAnBN,yE;;;;;;;;;;;;;;;;;;;;;;ACbrB;;AAEA;;;;;;;;IAOqBP,S;;;;;;;;;;;;AACjB;;;;;;;iCAOU;AACN,gBAAI,CAAC,KAAKc,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAED;;;;;;;;;8CAMuB;AACnB,kBAAM,IAAInQ,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAEA;;;;;;;;;;wCAOgB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAlCkC+Q,8C;;AAAlBhB,wE;;;;;;;;;;;;;;;;;;;;;;ACTrB;AACA;AACA;AACA;AACA;;IAGMiB,iB;AACF,iCAAc;AAAA;;AACV,aAAKC,UAAL,GAAkB,IAAInS,GAAJ,EAAlB;AACH;;;;0CAEiBoM,O,EAASgG,S,EAAW;AAClC,iBAAKD,UAAL,CAA
gB5R,GAAhB,CAAoB6L,OAApB,EAA6BgG,SAA7B;AACA,mBAAO,IAAP;AACH;;;4BAEG3R,I,EAAM;AACN,mBAAO,KAAK0R,UAAL,CAAgBvR,GAAhB,CAAoBH,IAApB,CAAP;AACH;;;4BAEGA,I,EAAM;AACN,mBAAO,KAAK0R,UAAL,CAAgBtR,GAAhB,CAAoBJ,IAApB,CAAP;AACH;;;;;;AAGL,IAAM4R,wBAAwB,SAAxBA,qBAAwB,CAACtS,KAAD,EAAW;AACrCA,UACiBuS,iBADjB,CACmCjG,uDAAgBA,CAACuB,WADpD,EACiEsD,oDADjE,EAEiBoB,iBAFjB,CAEmCjG,uDAAgBA,CAACwB,QAFpD,EAE8D0E,iDAF9D,EAGiBD,iBAHjB,CAGmCjG,uDAAgBA,CAACC,MAHpD,EAG4DwE,+CAH5D,EAIiBwB,iBAJjB,CAImC5D,qDAAcA,CAACC,UAJlD,EAI8D6C,mDAJ9D;AAKH,CAND;;AAQA,IAAM9D,gBAAiB,YAAY;AAC/B,QAAI3N,QAAQ,IAAZ;AACA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAImS,iBAAJ,EAAR;AACAG,8BAAsBtS,KAAtB;AACA,eAAOA,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARsB,EAAvB;;AAUe2M,4EAAf,E;;;;;;;;;;;;;;;;;;;AC5CA;AACA;;AAEA;;;;;;;;;;;;;;;;;;;;IAmBqBuE,K;AACjB;;;;;;;AAOA,mBAAa/J,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AACnC,aAAKwC,YAAL,GAAoBA,YAApB;AACA,aAAKxC,UAAL,GAAkBA,UAAlB;AACH;;;;;;AAMD;;;;;;iCAMU;AACN,kBAAM,IAAI9D,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKsG,YAAL,CAAkB/G,MAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAK+G,YAAL,CAAkBvN,IAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKuN,YAAL,CAAkB/G,MAAlB,CAAyBV,IAAhC;AACH;;AAED;;;;;;;;;kCAMW;AACP,mBAAO,KAAKyH,YAAL,CAAkB/G,MAAlB,CAAyBiL,OAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKlE,YAAL,CAAkB/G,MAAlB,CAAyBqR,WAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKtK,YAAL,CAAkB/G,MAAlB,CAAyBsR,WAAzB,IAAwC,KAAKvK,YAAL,CAAkB/G,MAAlB,CAAyBxG,IAAxE;AACH;;AAED;;;;;;;;;+BAMQ;AAAA;;AACJ,gBAAMuG,OAAO,EAAb;AACA6I,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvCsG,qBAAKxF,IAAL,CAAU,MAAKwM,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAV;AACH,aAFD;AAGA,mBAAOsG,IAAP;AACH;;AAED;;;;;;;;;wCAMiB;AACb,kBAAM,IAAIU,KAAJ,CAAU,qBAAV,CAAN;AACH;;;iCAhGe;AACZ,kBAAM,IAAIA,KAAJ,CAAU,qBAAV,CAAN;AACH;;;4BAgGoB;AACjB,gBAAM8Q,UAAU;AACZC,yBAAS,EADG;AAEZC,0BAAU,IAFE;AAGZ/K,yBAHY,qBAGFlN,IAHE,EAGI;AACZ,yBAAKgY,OAAL,CAAahY,IAAb,GAAoBA,IAApB;AACA,2BAAO,IAAP;AACH,iBANW;AAOZwG,sBAPY,kBAOLA,OAPK,EAOG;AACX,yBAAKwR,OAAL,CAAaxR,MAAb,GAAsBA,
OAAtB;AACA,2BAAO,IAAP;AACH,iBAVW;AAWZD,oBAXY,gBAWPA,KAXO,EAWD;AACP,yBAAKyR,OAAL,CAAazR,IAAb,GAAoBA,KAApB;AACA,2BAAO,IAAP;AACH,iBAdW;AAeZgH,4BAfY,wBAeCA,aAfD,EAee;AACvB,yBAAKyK,OAAL,CAAazK,YAAb,GAA4BA,aAA5B;AACA,2BAAO,IAAP;AACH,iBAlBW;AAmBZxC,0BAnBY,sBAmBDA,WAnBC,EAmBW;AACnB,yBAAKiN,OAAL,CAAajN,UAAb,GAA0BA,WAA1B;AACA,2BAAO,IAAP;AACH,iBAtBW;AAuBZuK,qBAvBY,mBAuBJ;AACJ,wBAAI/H,eAAe,IAAnB;AACA,wBAAI,KAAKyK,OAAL,CAAazK,YAAb,YAAqC2K,sDAAzC,EAAuD;AACnD3K,uCAAe,KAAKyK,OAAL,CAAazK,YAA5B;AACH,qBAFD,MAEO,IAAI,KAAKyK,OAAL,CAAaxR,MAAb,IAAuB,KAAKwR,OAAL,CAAazR,IAAxC,EAA8C;AACjDgH,uCAAe,IAAI2K,sDAAJ,CAAiB,KAAKF,OAAL,CAAahY,IAA9B,EACK,KAAKgY,OAAL,CAAazR,IADlB,EAEK,KAAKyR,OAAL,CAAaxR,MAFlB,EAGK,KAAKyR,QAAL,CAAcE,MAAd,EAHL,CAAf;AAIH,qBALM,MAMF;AACD,8BAAM,IAAIlR,KAAJ,CAAU,0BAAV,CAAN;AACH;AACD,2BAAO,IAAI,KAAKgR,QAAT,CAAkB1K,YAAlB,EAAgC,KAAKyK,OAAL,CAAajN,UAA7C,CAAP;AACH;AArCW,aAAhB;AAuCA,mBAAOgN,OAAP;AACH;;;;;;AAxJgBT,oE;;;;;;;;;;;;ACtBrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACHA;AACA;AACA;;AAEA;;;;;;;;IAOqBH,O;;;;;;;;;;;;AACnB;;;;;;;iCAOY;AACN,gBAAI,CAAC,KAAKC,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAEH;;;;;;;;;+BAMU;AACJ,mBAAO,KAAK7J,YAAL,CAAkB/G,MAAlB,CAAyB4R,IAAhC;AACH;;AAEH;;;;;;;;;mCAMc;AACR,mBAAO,KAAK7K,YAAL,CAAkB/G,MAAlB,CAAyB6R,QAAzB,IAAqCC,8EAA5C;AACH;;AAEH;;;;;;;;;uCAMkB;AAAA,gBACJC,YADI,GACa,KAAKhL,YAAL,CAAkB/G,MAD/B,CACJ+R,YADI;;AAEZ,mBAAOA,wBAAwBzY,QAAxB,GAAmCyY,YAAnC,GAAkDC,mDAAzD;AACH;;AAEH;;;;;;;;;8CAMyB;AACnB,kBAAM,IAAIvR,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;;wCAOiB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAjEgC+Q,8C;;AAAhBH,sE;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;;AAEA;;;;;;;;IAOqBd,Y;;;;;;;;;;;;AACnB;;;;;;;8BAOSoC,G,EAAK;AACR,gBAAMC,QAAQ,yDAAd;AACAD,kBAAME,OAAOF,GAAP,CAAN;AACA,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIK,UAAUL,IAAIM,KAAJ,CAAUL,KAAV,CAAd;AACAE,yBAASE,UAAa3N,OAAO6N,UAAP,CAAkBF,QAAQ,CA
AR,CAAlB,CAAb,SAA8C3N,OAAO6N,UAAP,CAAkBF,QAAQ,CAAR,CAAlB,CAA9C,GACUjG,4DAAiBA,CAACoG,EADrC;AAEH,aAJD,MAIO;AACHL,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EArBqCO,qD;;AAArB9C,2E;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBO,iB;;;;;;;;;;;;AACnB;;;;;;;8BAOS6B,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnCG,yBAASD,OAAOF,GAAP,EAAYpa,IAAZ,EAAT;AACH,aAFD,MAEO;AACHua,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAjB0CO,qD;;AAA1BvC,gF;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBM,gB;;;;;;;;;;;;AACnB;;;;;;;8BAOSuB,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAC/F,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIW,YAAYJ,WAAWP,GAAX,EAAgB,EAAhB,CAAhB;AACAG,yBAASzN,OAAO3M,KAAP,CAAa4a,SAAb,IAA0BvG,4DAAiBA,CAACoG,EAA5C,GAAiDG,SAA1D;AACH,aAHD,MAGO;AACHR,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAlByCO,qD;;AAAzBjC,+E;;;;;;;;;;;;;;;;;ACVrB;;;;;;IAMqBiC,W;;;;;;;;AACjB;;;;;;4BAMS;AACL,YAAM,IAAIlS,KAAJ,CAAU,qBAAV,CAAN;AACH;;;;;;AATgBkS,0E;;;;;;;;;;;;;;;;;;;;;;;;ACNrB;AACA;AACA;;AAEA;;;;;;;;IAOqBE,c;;;;;;;;;;;;;AAEjB;;;;;;;8BAOOZ,G,QAAiB;AAAA,gBAAVvZ,MAAU,QAAVA,MAAU;;AACpB,gBAAI0Z,eAAJ;AACA;AACA,gBAAI,CAAC,KAAKU,IAAV,EAAgB;AACZ,qBAAKA,IAAL,GAAY,IAAIrE,wDAAJ,CAAsB/V,MAAtB,CAAZ;AACH;AACD,gBAAI,CAAC2T,4DAAiBA,CAACgG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIc,aAAa,KAAKD,IAAL,CAAUE,aAAV,CAAwBf,GAAxB,CAAjB;AACAG,yBAASW,aAAaA,WAAWE,OAAX,EAAb,GAAoC5G,4DAAiBA,CAACoG,EAA/D;AACH,aAHD,MAGO;AACHL,yBAAS/F,4DAAiBA,CAACqG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAtBuCO,qD;;AAAvBE,6E;;;;;;;;;;;;;;;;;ACXrB;;;;;;;;IAQqBnB,Y;AACjB;;;;;;;;;AASA,wBAAalY,IAAb,EAAmBuG,IAAnB,EAAyBC,MAAzB,EAAiC2R,MAAjC,EAAyC;AAAA;;AACrC,SAAKnY,IAAL,GAAYA,IAAZ;AACA,SAAKwG,MAAL,GAAcA,MAAd;AACA,SAAK2R,MAAL,GAAcA,MAAd;AACA,SAAK5R,IAAL,GAAY,KAAKmT,SAAL,CAAenT,IAAf,CAAZ;AACH;;AAED;;;;;;;;;;;8BAOWA,I,EAAM;AAAA;;AACb,aAAOA,KAAKxG,GAAL,CAAS;AAAA,eAAS,MAAKoY
,MAAL,CAAYrZ,KAAZ,CAAkB8L,KAAlB,EAAyB,EAAE1L,QAAQ,MAAKsH,MAAL,CAAYtH,MAAtB,EAAzB,CAAT;AAAA,OAAT,CAAP;AACH;;;;;;AA1BgBgZ,2E;;;;;;;;;;;;;;;;;;;;;;;;;;ACRrB;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBN,Q;;;AAChB;;;;;;;AAOD,sBAAarK,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AAAA,wHAC7BwC,YAD6B,EACfxC,UADe;;AAGnC,cAAK4O,cAAL,GAAsB,IAAtB;AAHmC;AAItC;;AAEA;;;;;;;;;;;8CAOsB;AAAA;;AACnB,gBAAMnD,OAAO,IAAIC,GAAJ,EAAb;AACA,gBAAMC,SAAS,EAAf;;AAEA;AACA;AACAtH,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI,CAACuW,KAAKvQ,GAAL,CAAS2E,KAAT,CAAL,EAAsB;AAClB4L,yBAAKG,GAAL,CAAS/L,KAAT;AACA8L,2BAAO3V,IAAP,CAAY6J,KAAZ;AACH;AACJ,aAND;;AAQA,mBAAO8L,MAAP;AACH;;AAGD;;;;;;;;;uDAMgC;AAC5B,gBAAI,KAAKiD,cAAT,EAAyB;AACrB,uBAAO,KAAKA,cAAZ;AACH;;AAED,gBAAMC,aAAa,KAAKrT,IAAL,GAAYsT,MAAZ,CAAmB;AAAA,uBAAQ,EAAEhR,gBAAgBgK,4DAAlB,CAAR;AAAA,aAAnB,EAAiEtJ,IAAjE,CAAsE,UAACuQ,CAAD,EAAIC,CAAJ;AAAA,uBAAUD,IAAIC,CAAd;AAAA,aAAtE,CAAnB;AACA,gBAAMC,QAAQJ,WAAWzY,MAAzB;AACA,gBAAI8Y,UAAU9O,OAAO4L,iBAArB;AACA,gBAAImD,kBAAJ;AACA,gBAAIC,kBAAJ;AACA,gBAAIC,iBAAiB,CAArB;;AAEA,iBAAK,IAAIna,IAAI,CAAb,EAAgBA,IAAI+Z,KAApB,EAA2B/Z,GAA3B,EAAgC;AAC5Bia,4BAAYN,WAAW3Z,IAAI,CAAf,CAAZ;AACAka,4BAAYP,WAAW3Z,CAAX,CAAZ;;AAEA,oBAAIka,cAAcD,SAAlB,EAA6B;AACzB;AACH;;AAEDD,0BAAUI,KAAKvD,GAAL,CAASmD,OAAT,EAAkBE,YAAYP,WAAW3Z,IAAI,CAAf,CAA9B,CAAV;AACAma;AACH;;AAED,gBAAI,CAACA,cAAL,EAAqB;AACjBH,0BAAU,IAAV;AACH;AACD,iBAAKN,cAAL,GAAsBM,OAAtB;;AAEA,mBAAO,KAAKN,cAAZ;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKpM,YAAL,CAAkB/G,MAAlB,CAAyBtH,MAAhC;AACH;;AAED;;;;;;;;;;wCAOiB;AAAA;;AACb,gBAAMqH,OAAO,EAAb;AACA,gBAAMY,aAAa,KAAKjI,MAAL,EAAnB;;AAEAkQ,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA;AACA,oBAAI4S,4DAAiBA,CAACgG,SAAlB,CAA4BjO,KAA5B,KAAuC,CAACzD,UAAD,IAAegE,OAAOmP,QAAP,CAAgB1P,KAAhB,CAA1D,EAAmF;AAC/E;AACA,wBAAM2P,cAAc1H,4DAAiBA,CAACqG,cAAlB,CAAiCtO,KAAjC,KAA2CA,KAA/D;AACArE,yBAAKxF,IAAL,CAAUwZ,WAAV;AACH,iBAJD,MAIO;AACHhU,yB
AAKxF,IAAL,CAAUkU,wDAAiBA,CAACuF,QAAlB,CAA2B5P,KAA3B,EAAkCzD,UAAlC,CAAV;AACH;AACJ,aAVD;AAWA,mBAAOZ,IAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAI8S,gEAAJ,EAAP;AACH;;;;EAlHiC/C,kD;;AAAjBsB,uE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACbrB;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;AAGA,SAAS6C,oBAAT,CAA+BtS,MAA/B,EAAuC0E,aAAvC,EAAsDR,OAAtD,EAA+DpM,CAA/D,EAAkE;AAC9D,QAAMya,OAAO,EAAb;;AAD8D;AAAA;AAAA;;AAAA;AAG9D,6BAA2BvS,OAAOwS,OAAP,EAA3B,8HAA6C;AAAA;;AAAA;;AAAA,gBAAjCxc,GAAiC;AAAA,gBAA5BiK,KAA4B;;AACzCsS,iBAAKtS,MAAMpI,IAAN,EAAL,IAAqB,IAAI0N,8CAAJ,CAAUb,cAAc1O,GAAd,EAAmB8B,CAAnB,CAAV,EAAiCoM,QAAQlO,GAAR,EAAa8B,CAAb,CAAjC,EAAkDmI,KAAlD,CAArB;AACH;AAL6D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAM9D,WAAOsS,IAAP;AACH;;AAEM,SAASE,eAAT,CAA0BzS,MAA1B,EAAkC;AACrC,QAAMuS,OAAO,EAAb;;AAEA,SAAK,IAAMvc,GAAX,IAAkBgK,MAAlB,EAA0B;AACtBuS,aAAKvc,GAAL,IAAY,IAAIuP,8CAAJ,CAAUvF,OAAOhK,GAAP,EAAY0c,cAAtB,EAAsC1S,OAAOhK,GAAP,EAAY2c,QAAlD,EAA4D3c,GAA5D,CAAZ;AACH;AACD,WAAOuc,IAAP;AACH;;AAEM,IAAMK,eAAe,SAAfA,YAAe,QAA8B5N,iBAA9B,EAAiD6N,cAAjD,EAAoE;AAAA;AAAA,QAAlEjQ,UAAkE;AAAA,QAAtDkQ,aAAsD;;AAC5F,QAAIC,SAASD,cAAc9Z,MAAd,GAAuB8Z,cAAc/P,KAAd,CAAoB,GAApB,CAAvB,GAAkD,EAA/D;AACA,QAAIiQ,kBAAkBhO,kBAAkBK,SAAlB,EAAtB;AACA,QAAI4N,YAAYF,OAAOnb,GAAP,CAAW;AAAA,eAAQwV,iFAA0BA,CAAC4F,gBAAgBE,IAAhB,EAAsB9N,YAAjD,EAA+DxC,UAA/D,CAAR;AAAA,KAAX,CAAhB;AACA,WAAO2K,oDAAUA,CAACC,eAAX,CAA2ByF,SAA3B,EAAsCJ,cAAtC,CAAP;AACH,CALM;;AAOA,IAAMM,2BAA2B,SAA3BA,wBAA2B,CAAC/K,KAAD,EAAQgL,SAAR,EAA+C;AAAA,QAA5B7P,MAA4B,uEAAnB,EAAmB;AAAA,QAAf8P,UAAe;;AACnF,QAAID,cAAcrX,yDAAcA,CAACI,OAAjC,EAA0C;AAAA;;AACtCiM,cAAMkL,WAAN,CAAkBta,MAAlB,GAA2B,CAA3B;AACA,oCAAMsa,WAAN,EAAkB1a,IAAlB,8CAA0Bya,UAA1B;AACH,KAHD,MAGO;AACHjL,cAAMkL,WAAN,CAAkB1a,IAAlB,CAAuB;AACnB2a,gBAAIH,SADe;AAEnBI,kBAAMjQ,MAFa;AAGnBkQ,sBAAUJ;AAHS,SAAvB;AAKH;AACJ,CAXM;AAYA,IAAMK,4BAA4B,SAA5BA,yBAA4B,CAACC,QAAD,EAAWC,KAAX,EAAqB;AAAA;;AAC1D,mCAAMC,mBAAN,EAA0Bjb,IAA1B,iDAAkC+a,SAASE,mBAA3C,4BAAmEF,SAASL,WAA5E;AACH,CAFM;;AAIA,IAAMzP,qBAAq
B,SAArBA,kBAAqB,CAAC8P,QAAD,EAAWvL,KAAX,EAAkBgL,SAAlB,EAAyD;AAAA,QAA5B7P,MAA4B,uEAAnB,EAAmB;AAAA,QAAf8P,UAAe;;AACvFF,6BAAyB/K,KAAzB,EAAgCgL,SAAhC,EAA2C7P,MAA3C,EAAmD8P,UAAnD;AACAK,8BAA0BC,QAA1B,EAAoCvL,KAApC;AACH,CAHM;;AAKP,IAAM0L,sEACD/J,oDAAaA,CAACC,MADb,EACsB;AACpB+J,eAAW,CAAC,YAAD,CADS;AAEpBC,cAAU,CAAC,IAAD,EAAO,KAAP;AAFU,CADtB,mCAKDjK,oDAAaA,CAACkB,OALb,EAKuB;AACrB8I,eAAW,CAAC,kBAAD,CADU;AAErBC,cAAU,CAAC,KAAD,EAAQ,IAAR;AAFW,CALvB,mCASDjK,oDAAaA,CAACmB,GATb,EASmB;AACjB6I,eAAW,CAAC,YAAD,EAAe,kBAAf,CADM;AAEjBC,cAAU,CAAC,IAAD,EAAO,IAAP;AAFO,CATnB,kBAAN;;AAeA,IAAMC,qBAAqB,SAArBA,kBAAqB,CAACrR,UAAD,EAAa9K,CAAb,EAAgBoc,iBAAhB,EAAsC;AAC7D,QAAIA,sBAAsB,CAAC,CAAvB,IAA4Bpc,MAAOoc,oBAAoB,CAA3D,EAA+D;AAC3D,YAAMC,KAAKvR,WAAW5J,MAAX,GAAoB,CAA/B;;AAEA4J,mBAAWuR,EAAX,IAAoBvR,WAAWuR,EAAX,EAAepR,KAAf,CAAqB,GAArB,EAA0B,CAA1B,CAApB,SAAoDjL,CAApD;AACH,KAJD,MAIO;AACH8K,mBAAWhK,IAAX,MAAmBd,CAAnB;AACH;AACJ,CARD;;AAUO,IAAMsc,2BAA2B,SAA3BA,wBAA2B,CAACxR,UAAD,EAAayR,OAAb,EAAsBvK,IAAtB,EAA+B;AACnE,QAAIwK,uBAAuB,CAAC,CAA5B;AACA,QAAIC,uBAAuB,CAAC,CAA5B;AACA,QAAMC,gBAAgB,EAAtB;AACA,QAAMC,gBAAgB,EAAtB;;AAJmE,+CAM9BX,cAAchK,IAAd,EAAoBkK,QANU;AAAA,QAM5DU,YAN4D;AAAA,QAM9CC,YAN8C;;AAQnE1N,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM8c,gBAAgBP,QAAQvc,CAAR,CAAtB;AACA8c,yBAAiBF,YAAjB,IAAiCT,mBAAmBO,aAAnB,EAAkC1c,CAAlC,EAAqCwc,oBAArC,CAAjC;AACA,SAACM,aAAD,IAAkBD,YAAlB,IAAkCV,mBAAmBQ,aAAnB,EAAkC3c,CAAlC,EAAqCyc,oBAArC,CAAlC;AACH,KAJD;AAKA,WAAO;AACH3R,oBAAY4R,cAAcvc,IAAd,CAAmB,GAAnB,CADT;AAEH4c,0BAAkBJ,cAAcxc,IAAd,CAAmB,GAAnB;AAFf,KAAP;AAIH,CAjBM;;AAoBA,IAAM6c,0BAA0B,SAA1BA,uBAA0B,CAAClS,UAAD,EAAayR,OAAb,EAAsBvK,IAAtB,EAA4BH,YAA5B,EAA0CoL,aAA1C,EAA4D;AAC/F,QAAIb,oBAAoB,EAAxB;AACA,QAAMc,kBAAkB,EAAxB;AACA,QAAMC,eAAe,EAArB;;AAEAhO,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAIuc,QAAQvc,CAAR,CAAJ,EAAgB;AACZ,gBAAIuW,OAAO,EAAX;;AAEA,gBAAI6G,eAAe,EAAEvU,MAAM,EAAR,EAAnB;;AAEAgJ,yBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxB,oBAAMhF,OAAO2W,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAb;A
ACAuW,uBAAUA,IAAV,SAAkBjQ,IAAlB;AACA8W,6BAAavU,IAAb,CAAkByC,CAAlB,IAAuBhF,IAAvB;AACH,aAJD;;AAMA,gBAAI4W,gBAAgB3G,IAAhB,MAA0B1L,SAA9B,EAAyC;AACrCqS,gCAAgB3G,IAAhB,IAAwB,EAAxB;AACA6F,kCAAkB7F,IAAlB,IAA0B,CAAC,CAA3B;AACA4G,6BAAa5G,IAAb,IAAqB6G,YAArB;AACH;;AAEDjB,+BAAmBe,gBAAgB3G,IAAhB,CAAnB,EAA0CvW,CAA1C,EAA6Coc,kBAAkB7F,IAAlB,CAA7C;AACA6F,8BAAkB7F,IAAlB,IAA0BvW,CAA1B;AACH;AACJ,KArBD;;AAuBA,WAAO;AACHkd,wCADG;AAEHC;AAFG,KAAP;AAIH,CAhCM;;AAmCA,IAAME,eAAe,SAAfA,YAAe,CAACC,QAAD,EAAWC,QAAX,EAAqB9R,MAArB,EAA6BoQ,QAA7B,EAAuC2B,QAAvC,EAAoD;AAC5E,QAAIzO,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAM6M,SAAS5M,YAAT,EAAN;AAAA,KAApB;AAF4E,QAGpE+C,IAHoE,GAG3DvG,MAH2D,CAGpEuG,IAHoE;;AAI5E,QAAMlH,aAAawS,SAAS3T,WAA5B;AACA,QAAMyD,qBAAqBkQ,SAASnQ,kBAAT,CAA4BE,mBAAvD;;AAEA,QAAMoQ,mBAAmB,SAAnBA,gBAAmB;AAAA,eAASF,SAC9BnQ,mBAAmBuB,KAAnB,CAD8B,EAE9BA,KAF8B,EAG9BK,aAH8B,EAI9BD,WAJ8B,CAAT;AAAA,KAAzB;;AAOA,WAAOyO,SAAS1S,UAAT,EAAqB2S,gBAArB,EAAuCzL,IAAvC,CAAP;AACH,CAfM;;AAiBA,IAAM0L,qBAAqB,SAArBA,kBAAqB,CAACpN,KAAD,EAAW;AACzC,QAAMgN,WAAWhN,MAAM1B,KAAN,CAAY,KAAZ,CAAjB;AACA,QAAM1B,oBAAoBoD,MAAM/G,oBAAN,EAA1B;AACA+T,aAASzT,cAAT,GAA0BqD,kBAAkBhF,MAAlB,CAAyBpI,GAAzB,CAA6B;AAAA,eAAKO,EAAEN,IAAF,EAAL;AAAA,KAA7B,EAA4CI,IAA5C,CAAiD,GAAjD,CAA1B;;AAEA;AACA+M,sBAAkBW,gBAAlB,GAAqC,IAArC;AACAX,sBAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,sBAAkBa,cAAlB,GAAmC,IAAnC;AACAuP,aAAStP,qBAAT,GAAiCC,qBAAjC;;AAEA,WAAOqP,QAAP;AACH,CAZM;;AAcP,IAAMK,SAAS,SAATA,MAAS,CAACvW,GAAD,EAAMd,IAAN,EAAYwK,EAAZ,EAAmB;AAC9B,QAAI5S,MAAM4S,GAAG1J,GAAH,EAAQd,IAAR,EAAc,CAAd,CAAV;;AAEA,SAAK,IAAItG,IAAI,CAAR,EAAW4d,MAAMxW,IAAIlG,MAA1B,EAAkClB,IAAI4d,GAAtC,EAA2C5d,GAA3C,EAAgD;AAC5C9B,cAASA,GAAT,SAAgB4S,GAAG1J,GAAH,EAAQd,IAAR,EAActG,CAAd,CAAhB;AACH;AACD,WAAO9B,GAAP;AACH,CAPD;;AASA,IAAM2f,QAAQ,SAARA,KAAQ,CAACzW,GAAD,EAAMc,MAAN,EAAcoC,GAAd,EAAmBwT,KAAnB,EAA6B;AACvC,QAAMtF,MAAMtQ,OAAOd,IAAIkD,GAAJ,CAAP,EAAiByT,aAA7B;AACA,WAAO3W,IAAIkD,GAAJ,MAAatG,iDAAb,GAAsB8Z,KAAtB,GAA8BtF,GAArC;AACH,CAHD;;AAKA,IAAMwF,oCACDlK,qDAAcA,CAACC,UADd,EAC2B,UAACyE,GAAD,EAAM/B,M
AAN,EAAiB;AAC1C,QAAMwH,YAAYxH,OAAO,CAAP,aAAqBtV,KAArB,GAA6BsV,MAA7B,GAAsC,CAACA,MAAD,CAAxD;AACA,WAAOwH,UAAUC,IAAV,CAAe;AAAA,eAAO1F,OAAO2F,IAAI,CAAJ,CAAP,IAAiB3F,OAAO2F,IAAI,CAAJ,CAA/B;AAAA,KAAf,CAAP;AACH,CAJC,CAAN;;AAOA,IAAMC,iBAAiB,SAAjBA,cAAiB,CAACjgB,KAAD,EAAQsY,MAAR,EAAgB4H,SAAhB;AAAA,WAA8BL,cAAcK,SAAd,EAAyBlgB,KAAzB,EAAgCsY,MAAhC,CAA9B;AAAA,CAAvB;;AAEO,IAAM6H,yBAAyB,SAAzBA,sBAAyB,CAAChO,KAAD,EAAQiO,UAAR,EAAoC;AAAA,QAAhB9S,MAAgB,uEAAP,EAAO;;AACtE,QAAI+S,MAAM,EAAV;AACA,QAAMlD,YAAY7P,OAAO6P,SAAP,IAAoBvW,4DAAiBA,CAACC,GAAxD;AACA,QAAMyZ,kBAAkBhT,OAAOgT,eAAP,IAA0B,KAAlD;AACA,QAAMC,cAAchB,mBAAmBpN,KAAnB,CAApB;AACA,QAAMqO,oBAAoBD,YAAYpQ,eAAZ,EAA1B;;AAEA,QAAI,CAACiQ,WAAWrd,MAAhB,EAAwB;AACpBsd,cAAM,CAAC;AAAA,mBAAM,KAAN;AAAA,SAAD,CAAN;AACH,KAFD,MAEO;AACHA,cAAMD,WAAWze,GAAX,CAAe;AAAA,mBAAc,YAAmB;AAAA,oBAAlB6b,QAAkB,uEAAP,EAAO;AAAA,4CACRA,QADQ,CAC1CpM,WAD0C;AAAA,oBAC1CA,WAD0C,yCAC5B,CAAC,EAAD,EAAK,EAAL,CAD4B;AAAA,oBAClBqP,KADkB,GACRjD,QADQ,CAClBiD,KADkB;;AAAA,kDAEXrP,WAFW;AAAA;AAAA,oBAE3CtF,UAF2C,iCAE9B,EAF8B;AAAA;AAAA,oBAE1B4U,MAF0B,kCAEjB,EAFiB;;AAGlD,oBAAMC,OAAO7U,WAAW/I,MAAxB;AACA,oBAAM6d,YAAY,EAAlB;;AAEA,oBAAID,IAAJ,EAAU;AACN,yBAAK,IAAI9e,IAAI,CAAR,EAAW4d,MAAMrO,YAAYrO,MAAlC,EAA0ClB,IAAI4d,GAA9C,EAAmD5d,GAAnD,EAAwD;AACpD,4BAAMM,MAAMiP,YAAYvP,CAAZ,CAAZ;AACA,4BAAM9B,MAAMoC,IAAIH,IAAJ,EAAZ;AACA4e,kCAAU7gB,GAAV,IAAiB,CAAjB;AACH;AACJ;AACD,oBAAM8gB,YAAYte,OAAOmI,IAAP,CAAY+V,SAAS,EAArB,CAAlB;AACA,uBAAOC,OAAO3d,MAAP,IAAiB8d,UAAU9d,MAA3B,GAAoC,UAACgH,MAAD,EAASlI,CAAT,EAAe;AACtD,wBAAMif,UAAUH,OAAOC,UAAUpB,OAAO1T,UAAP,EAAmB/B,MAAnB,EAA2B2V,KAA3B,EAAkC7d,CAAlC,CAAV,CAAP,GAAyD,IAAzE;;AAEA,wBAAIye,eAAJ,EAAqB;AACjB,+BAAOO,UAAUE,KAAV,CAAgB,UAAC/W,KAAD,EAAW;AAC9B,gCAAMqQ,MAAMtQ,OAAOC,KAAP,EAAc4V,aAA1B;AACA,mCAAOK,eAAe5F,GAAf,EAAoBoG,MAAMzW,KAAN,CAApB,EAAkCwW,kBAAkBxW,KAAlB,EAAyBgX,GAAzB,CAA6B3N,OAA/D,CAAP;AACH,yBAHM,KAGDyN,OAHN;AAIH;AACD,2BAAOA,OAAP;AACH,iBAVM,GAUH;AAAA,2BAAM,KAAN;AAAA,iBAVJ;AAWH,aAzBiC,CAyB/BrO,SAzB+B,CAAb;AAAA,SAAf,CAAN;AA0BH;;AAED,QAAIwO,sBAAJ;AACA,QAAI9D,cAAcvW,4DA
AiBA,CAACC,GAApC,EAAyC;AACrCoa,wBAAgBV,YAAYvK,MAAZ,CAAmB;AAAA,mBAAUqK,IAAIU,KAAJ,CAAU;AAAA,uBAAMpO,GAAG5I,MAAH,CAAN;AAAA,aAAV,CAAV;AAAA,SAAnB,EAA0D;AACtEwD,uBAAW;AAD2D,SAA1D,CAAhB;AAGH,KAJD,MAIO;AACH0T,wBAAgBV,YAAYvK,MAAZ,CAAmB;AAAA,mBAAUqK,IAAIN,IAAJ,CAAS;AAAA,uBAAMpN,GAAG5I,MAAH,CAAN;AAAA,aAAT,CAAV;AAAA,SAAnB,EAAyD;AACrEwD,uBAAW;AAD0D,SAAzD,CAAhB;AAGH;;AAED,WAAO0T,aAAP;AACH,CAlDM;;AAqDA,IAAMjN,kBAAkB,SAAlBA,eAAkB,CAAC0J,QAAD,EAAWhK,YAAX,EAA4D;AAAA,QAAnCC,SAAmC,uEAAvB;AAAA,eAAO0G,GAAP;AAAA,KAAuB;AAAA,QAAX/M,MAAW;AAAA,QAEnFC,SAFmF,GAGnFD,MAHmF,CAEnFC,SAFmF;;AAIvF,QAAMuR,gBAAgBpB,SAASnP,aAAT,GAAyBa,SAAzB,EAAtB;;AAJuF,wBASnF8P,aACAxB,SAASjN,KAAT,CAAelD,SAAf,CADA,EAEAoG,SAFA,EAGArG,MAHA,EAIAoQ,QAJA,EAKA;AAAA,0CAAIjQ,MAAJ;AAAIA,kBAAJ;AAAA;;AAAA,eAAeoR,yCAA2BpR,MAA3B,SAAmCiG,YAAnC,EAAiDoL,aAAjD,GAAf;AAAA,KALA,CATmF;AAAA,QAOnFC,eAPmF,iBAOnFA,eAPmF;AAAA,QAQnFC,YARmF,iBAQnFA,YARmF;;AAiBvF,QAAMkC,YAAY,EAAlB;AACA3e,WAAOmI,IAAP,CAAYqU,eAAZ,EAA6B5T,IAA7B,GAAoC1I,OAApC,CAA4C,UAACsJ,CAAD,EAAO;AAC/C,YAAIgT,gBAAgBhT,CAAhB,CAAJ,EAAwB;AACpB,gBAAMoV,SAASzD,SAASjN,KAAT,CAAelD,SAAf,CAAf;AACA,gBAAM6T,aAAapC,aAAajT,CAAb,CAAnB;AACAoV,mBAAO3V,WAAP,GAAqBuT,gBAAgBhT,CAAhB,EAAmB/J,IAAnB,CAAwB,GAAxB,CAArB;AACAmf,mBAAOtR,qBAAP,GAA+BC,qBAA/B;;AAEA,gBAAMuR,oBAAoB,SAApBA,iBAAoB;AAAA,uBAAU3N,aAAaqN,KAAb,CAAmB;AAAA,2BAAKhX,OAAOoD,CAAP,EAAUyS,aAAV,KAA4BwB,WAAW1W,IAAX,CAAgByC,CAAhB,CAAjC;AAAA,iBAAnB,CAAV;AAAA,aAA1B;AACA;AACA,gBAAII,SAAJ,EAAe;AACXK,mCAAmB8P,QAAnB,EAA6ByD,MAA7B,EAAqCrb,yDAAcA,CAACC,MAApD,EAA4DuH,MAA5D,EAAoE+T,iBAApE;AACH;AACDF,mBAAO9D,WAAP,CAAmB8D,OAAO9D,WAAP,CAAmBta,MAAnB,GAA4B,CAA/C,EAAkDwa,IAAlD,GAAyDyB,aAAajT,CAAb,CAAzD;;AAEAmV,sBAAUve,IAAV,CAAewe,MAAf;AACH;AACJ,KAhBD;;AAmBA,WAAOD,SAAP;AACH,CAtCM;AAuCA,IAAMI,uBAAuB,SAAvBA,oBAAuB,CAACnC,QAAD,EAAWxS,UAAX,EAAuB+Q,QAAvB,EAAiC6D,YAAjC,EAA+CnC,QAA/C,EAA4D;AAC5FD,aAAS3T,WAAT,GAAuBmB,UAAvB;AACAwS,aAAStP,qBAAT,GAAiCC,qBAAjC;AACAlC,uBACI8P,QADJ,EAEIyB,QAFJ,EAGIrZ,yDAAcA,CAACC,MAHnB,EAIK,EAAEuH,QAAQiU,YAAV,EAJL,EAKMnC,QALN;AAOH,CAVM;;AAaA,IAAMo
C,kBAAkB,SAAlBA,eAAkB,CAAC9D,QAAD,EAAW0B,QAAX,EAAqBmC,YAArB,EAAmCE,WAAnC,EAAmD;AAC9E,QAAIC,eAAe,EAAnB;;AAD8E,QAGxE7N,IAHwE,GAG/D0N,YAH+D,CAGxE1N,IAHwE;;;AAK9E,QAAMsN,SAASzD,SAASjN,KAAT,CAAegR,YAAYlU,SAA3B,CAAf;AACA,QAAMoU,mBAAmBzC,aACrBiC,MADqB,EAErB/B,QAFqB,EAGrBmC,YAHqB,EAIrB7D,QAJqB,EAKrBS,wBALqB,CAAzB;AAOA,QAAML,YAAYD,cAAchK,IAAd,EAAoBiK,SAAtC;;AAEAwD,yBAAqBH,MAArB,EAA6BQ,iBAAiB7D,UAAU,CAAV,CAAjB,CAA7B,EAA6DJ,QAA7D,EAAuE6D,YAAvE,EAAqFnC,QAArF;;AAEA,QAAItB,UAAU/a,MAAV,GAAmB,CAAvB,EAA0B;AACtB2e,uBAAehE,SAASjN,KAAT,CAAegR,YAAYlU,SAA3B,CAAf;AACA+T,6BAAqBI,YAArB,EAAmCC,iBAAiB7D,UAAU,CAAV,CAAjB,CAAnC,EAAmEJ,QAAnE,EAA6E6D,YAA7E,EAA2FnC,QAA3F;AACA,eAAO,CAAC+B,MAAD,EAASO,YAAT,CAAP;AACH;;AAED,WAAOP,MAAP;AACH,CAxBM;;AA0BA,IAAMS,mBAAmB,SAAnBA,gBAAmB,CAAClE,QAAD,EAAWmE,SAAX,EAAsBvU,MAAtB,EAA8B8G,SAA9B,EAA4C;AACxE,QAAM+M,SAASzD,SAASjN,KAAT,CAAenD,OAAOC,SAAtB,CAAf;AACA,QAAIuU,gBAAgBD,SAApB;AACA,QAAIvU,OAAOuG,IAAP,KAAgBC,oDAAaA,CAACkB,OAAlC,EAA2C;AACvC8M,wBAAgB1N,UAAUqH,MAAV,CAAiB;AAAA,mBAAaoG,UAAUzV,OAAV,CAAkB0C,SAAlB,MAAiC,CAAC,CAA/C;AAAA,SAAjB,CAAhB;AACH;AACD;AACA;AACAqS,WAAOzV,cAAP,GAAwBoW,cAAc9f,IAAd,CAAmB,GAAnB,CAAxB;AACAmf,WAAOtR,qBAAP,GAA+BC,qBAA/B;;AAEAlC,uBACI8P,QADJ,EAEIyD,MAFJ,EAGIrb,yDAAcA,CAACE,OAHnB,EAII,EAAE6b,oBAAF,EAAavU,cAAb,EAAqByU,iBAAiBD,aAAtC,EAJJ,EAKI,IALJ;;AAQA,WAAOX,MAAP;AACH,CApBM;;AAuBA,IAAM3M,mBAAmB,SAAnBA,gBAAmB,CAACkJ,QAAD,EAAWsE,YAAX,EAAyB1U,MAAzB,EAAiC8G,SAAjC;AAAA,WAC5B4N,aAAargB,GAAb,CAAiB;AAAA,eACbigB,iBAAiBlE,QAAjB,EAA2BuE,UAA3B,EAAuC3U,MAAvC,EAA+C8G,SAA/C,CADa;AAAA,KAAjB,CAD4B;AAAA,CAAzB;;AAIA,IAAMpE,qBAAqB,SAArBA,kBAAqB,CAAC1G,UAAD,EAAgB;AAC9C;AACAA,iBAAa4Y,sDAAOA,CAAC,EAAR,EAAY5Y,UAAZ,CAAb;AACA,QAAI,CAACA,WAAW5B,IAAhB,EAAsB;AAClB4B,mBAAW5B,IAAX,GAAkByL,gDAASA,CAACC,SAA5B;AACH;;AAED,QAAI,CAAC9J,WAAW+J,OAAhB,EAAyB;AACrB,gBAAQ/J,WAAW5B,IAAnB;AACA,iBAAKyL,gDAASA,CAAC4B,OAAf;AACIzL,2BAAW+J,OAAX,GAAqBsC,qDAAcA,CAACC,UAApC;AACA;AACJ;AACA,iBAAKzC,gDAASA,CAACC,SAAf;AACI9J,2BAAW+J,OAAX,GAAqBC,uDAAgBA,CAACuB,WAAtC;AACA;AAPJ;AASH;;AAED,WAAOvL,UAAP;AA
CH,CApBM;;AAsBA,IAAM6Y,qBAAqB,SAArBA,kBAAqB,CAAC7Y,UAAD,EAAgB;AAAA,QACtC5B,IADsC,GACd4B,UADc,CACtC5B,IADsC;AAAA,QAChC2L,OADgC,GACd/J,UADc,CAChC+J,OADgC;AAAA,QACvBzR,IADuB,GACd0H,UADc,CACvB1H,IADuB;;AAE9C,QAAI8F,SAASyL,gDAASA,CAACC,SAAnB,IAAgC1L,SAASyL,gDAASA,CAAC4B,OAAvD,EAAgE;AAC5D,YAAI,CAACJ,qDAAaA,CAAC9M,GAAd,CAAkBwL,OAAlB,CAAL,EAAiC;AAC7B,kBAAM,IAAIxK,KAAJ,uDAA6DwK,OAA7D,kBAAiFzR,IAAjF,YAAN;AACH;AACJ,KAJD,MAIO;AACH,cAAM,IAAIiH,KAAJ,4CAAkDnB,IAAlD,kBAAmE9F,IAAnE,YAAN;AACH;AACJ,CATM;;AAWA,IAAMwgB,4BAA4B,SAA5BA,yBAA4B;AAAA,WAAUha,OAAOzG,GAAP,CAAW,UAAC2H,UAAD,EAAgB;AAC1EA,qBAAa0G,mBAAmB1G,UAAnB,CAAb;AACA6Y,2BAAmB7Y,UAAnB;AACA,eAAOA,UAAP;AACH,KAJkD,CAAV;AAAA,CAAlC;;AAMA,IAAM+Y,mBAAmB,SAAnBA,gBAAmB,CAACja,MAAD,EAASka,UAAT,EAAwB;AACpDla,WAAO3F,OAAP,CAAe,UAAC6G,UAAD,EAAgB;AAC3B,YAAMiZ,cAAcjZ,WAAWkZ,EAA/B;AACA,YAAI,CAACD,WAAL,EAAkB;AAAE;AAAS;;AAE7B,YAAMpW,MAAMmW,WAAWlW,OAAX,CAAmB9C,WAAW1H,IAA9B,CAAZ;AACA0gB,mBAAWnW,GAAX,IAAkBoW,WAAlB;AACAjZ,mBAAW1H,IAAX,GAAkB2gB,WAAlB;AACA,eAAOjZ,WAAWkZ,EAAlB;AACH,KARD;AASH,CAVM;;AAYA,IAAMC,aAAa,SAAbA,UAAa,CAACC,QAAD,EAAWva,IAAX,EAAiBC,MAAjB,EAAyBC,OAAzB,EAAqC;AAC3DD,aAASga,0BAA0Bha,MAA1B,CAAT;AACAC,cAAU9F,OAAOgH,MAAP,CAAchH,OAAOgH,MAAP,CAAc,EAAd,EAAkBoZ,uDAAlB,CAAd,EAAgDta,OAAhD,CAAV;AACA,QAAMZ,YAAYM,yDAAcA,CAACD,GAAf,CAAmBO,QAAQU,UAA3B,CAAlB;;AAGA,QAAI,CAACtB,SAAL,EAAgB;AACZ,cAAM,IAAIoB,KAAJ,sCAA6CR,QAAQU,UAArD,aAAN;AACH;;AAR0D,6BAU3BtB,UAAUnD,OAAV,CAAkB6D,IAAlB,EAAwBC,MAAxB,EAAgCC,OAAhC,CAV2B;AAAA;AAAA,QAUpDiC,MAVoD;AAAA,QAU5CmE,aAV4C;;AAW3D4T,qBAAiBja,MAAjB,EAAyBkC,MAAzB;AACA,QAAMkN,WAAWtG,mEAAYA,CAACzC,aAAb,EAA4BrG,MAA5B,EAAoCkC,MAApC,CAAjB;;AAEA;AACA,QAAMsY,YAAYtL,oDAAUA,CAACC,eAAX,CAA2BC,QAA3B,EAAqCnP,QAAQzG,IAA7C,CAAlB;AACA8gB,aAAS1T,kBAAT,GAA8B4T,SAA9B;;AAEA;AACAF,aAASlX,WAAT,GAAuBiD,cAAc1L,MAAd,IAAwB0L,cAAc,CAAd,EAAiB1L,MAAzC,WAAuD0L,cAAc,CAAd,EAAiB1L,MAAjB,GAA0B,CAAjF,IAAuF,EAA9G;;AAEA;AACA,QAAM8f,eAAe,EAArB;AAtB2D,QAuBnD9Y,MAvBmD,GAuBxC6Y,SAvBwC,CAuBnD7Y,MAvBmD;;AAwB3D,QAAM+Y,gBAAgB/Y,OAAOpI,GAAP,CAAW;AAAA,eAASqI,MAAM7B,IAAN,
EAAT;AAAA,KAAX,CAAtB;AACA,QAAM4a,sBAAsBhZ,OAAOpI,GAAP,CAAW;AAAA,eAASqI,MAAMyE,aAAN,EAAT;AAAA,KAAX,CAA5B;AACAuC,wEAAkBA,CAAC0R,SAASlX,WAA5B,EAAyC,UAAC3J,CAAD,EAAO;AAC5CghB,qBAAahhB,CAAb,IAAkBwa,qBAAqBtS,MAArB,EAA6BgZ,mBAA7B,EAAkDD,aAAlD,EAAiEjhB,CAAjE,CAAlB;AACH,KAFD;AAGA+gB,cAAU1T,mBAAV,GAAgC2T,YAAhC;;AAEAH,aAAShX,cAAT,GAA2BtD,OAAOzG,GAAP,CAAW;AAAA,eAAKwL,EAAEvL,IAAP;AAAA,KAAX,CAAD,CAA0BI,IAA1B,EAA1B;AACA0gB,aAASpU,WAAT,GAAuBjG,QAAQU,UAAR,KAAuBd,iDAAUA,CAACC,IAAlC,GAAyCc,+DAAgBA,CAACb,IAAjB,CAAzC,GAAkEE,QAAQU,UAAjG;AACA,WAAO2Z,QAAP;AACH,CAlCM;;AAoCA,IAAMM,gBAAgB,SAAhBA,aAAgB,CAAC5a,MAAD,EAAS4B,KAAT,EAAmB;AAC5C,QAAInI,IAAI,CAAR;;AAEA,WAAOA,IAAIuG,OAAOrF,MAAlB,EAA0B,EAAElB,CAA5B,EAA+B;AAC3B,YAAImI,UAAU5B,OAAOvG,CAAP,EAAUD,IAAxB,EAA8B;AAC1B,mBAAO;AACHA,sBAAMoI,KADH;AAEHtC,sBAAMU,OAAOvG,CAAP,EAAUwR,OAAV,IAAqBjL,OAAOvG,CAAP,EAAU6F,IAFlC;AAGH8I,uBAAO3O;AAHJ,aAAP;AAKH;AACJ;AACD,WAAO,IAAP;AACH,CAbM;;AAeA,IAAMohB,yBAAyB,SAAzBA,sBAAyB,CAAC7B,UAAD,EAAgB;AAClD,QAAI3T,SAAS,EAAb;AACA,QAAI0P,kBAAJ;AACAA,gBAAYiE,WAAW9D,EAAvB;AACA,YAAQH,SAAR;AACA,aAAKrX,yDAAcA,CAACC,MAApB;AACI0H,qBAAS,CAAC2T,WAAW5D,QAAZ,CAAT;AACA;AACJ,aAAK1X,yDAAcA,CAACE,OAApB;AACIyH,qBAAS,CAAC2T,WAAW7D,IAAX,CAAgBwE,eAAjB,CAAT;AACA;AACJ,aAAKjc,yDAAcA,CAACO,IAApB;AACIoH,qBAAS,CAAC2T,WAAW5D,QAAZ,CAAT;AACA;AACJ,aAAK1X,yDAAcA,CAACG,OAApB;AACIkX,wBAAY,SAAZ;AACA1P,qBAAS,CAAC2T,WAAW7D,IAAX,CAAgB/P,aAAhB,CAA8BV,KAA9B,CAAoC,GAApC,CAAD,EAA2CsU,WAAW5D,QAAtD,CAAT;AACA;AACJ;AACIL,wBAAY,IAAZ;AAfJ;;AAkBA,WAAO;AACHA,4BADG;AAEH1P;AAFG,KAAP;AAIH,CA1BM;;AA4BP,IAAMyV,gCAAgC,SAAhCA,6BAAgC,CAACzQ,SAAD,EAAY0Q,SAAZ,EAA0B;AAC5D,QAAMC,cAAcD,UAAUE,cAAV,EAApB;AACA,QAAIC,iBAAiB7Q,SAArB;;AAEA2Q,gBAAY3gB,OAAZ,CAAoB,UAAC2e,UAAD,EAAgB;AAChC,YAAI,CAACA,UAAL,EAAiB;AACb;AACH;;AAH+B,oCAKF6B,uBAAuB7B,UAAvB,CALE;AAAA,YAKxBjE,SALwB,yBAKxBA,SALwB;AAAA,YAKb1P,MALa,yBAKbA,MALa;;AAMhC,YAAI0P,SAAJ,EAAe;AAAA;;AACXmG,6BAAiB,mCAAenG,SAAf,4CAA6B1P,MAA7B,UAAqC;AAClDF,2BAAW;AADuC,aAArC,GAAjB;AAGH;AACJ,KAXD;;AAaA,WAAO+V,cAAP;AACH,CAlBD;;AAoBA,IAAMC,mBAAmB,SAAnBA,gBAA
mB,CAAC9Q,SAAD,EAAY+Q,IAAZ,EAAqB;AAC1C,SAAK,IAAI3hB,IAAI,CAAR,EAAW4d,MAAM+D,KAAKzgB,MAA3B,EAAmClB,IAAI4d,GAAvC,EAA4C5d,GAA5C,EAAiD;AAC7C,YAAMsQ,QAAQqR,KAAK3hB,CAAL,CAAd;AACA4Q,oBAAYyQ,8BAA8BzQ,SAA9B,EAAyCN,KAAzC,CAAZ;AACH;AACD,WAAOM,SAAP;AACH,CAND;;AAQA,IAAMgR,uBAAuB,SAAvBA,oBAAuB,CAACN,SAAD,EAAY1Q,SAAZ,EAA0D;AAAA,QAAnCnF,MAAmC,uEAA1B,EAA0B;AAAA,QAAtBoW,YAAsB,uEAAP,EAAO;;AACnF,QAAMC,qBAAqBD,aAAaC,kBAAxC;AACA,QAAMC,gBAAgBF,aAAaE,aAAb,IAA8B,EAApD;;AAEA,QAAIT,cAAcQ,kBAAlB,EAAsC;AAClC;AACH;;AAED,QAAME,YAAYD,cAAc7gB,MAAd,GAAuB6gB,cAAcxX,OAAd,CAAsB+W,SAAtB,MAAqC,CAAC,CAA7D,GAAiE,IAAnF;;AAEAU,iBAAaV,UAAUW,iBAAV,CAA4BrR,SAA5B,EAAuCnF,MAAvC,CAAb;;AAEA,QAAMyW,WAAWZ,UAAUa,SAA3B;AACAD,aAASthB,OAAT,CAAiB,UAACwhB,KAAD,EAAW;AACxB,YAAMX,iBAAiBJ,8BAA8BzQ,SAA9B,EAAyCwR,KAAzC,CAAvB;AACAR,6BAAqBQ,KAArB,EAA4BX,cAA5B,EAA4ChW,MAA5C,EAAoDoW,YAApD;AACH,KAHD;AAIH,CAjBD;;AAmBO,IAAM1R,sBAAsB,SAAtBA,mBAAsB,CAACG,KAAD,EAAW;AAC1C,WAAOA,MAAM+R,OAAN,IAAiB/R,MAAMkL,WAAN,CAAkB8G,IAAlB,CAAuB;AAAA,eAAK1Y,EAAE6R,EAAF,KAASxX,yDAAcA,CAACG,OAA7B;AAAA,KAAvB,CAAxB,EAAsF;AAClFkM,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAO/R,KAAP;AACH,CALM;;AAOA,IAAMP,mBAAmB,SAAnBA,gBAAmB,CAACO,KAAD,EAAW;AACvC,WAAOA,MAAM+R,OAAb,EAAsB;AAClB/R,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAO/R,KAAP;AACH,CALM;;AAOA,IAAMiS,qBAAqB,SAArBA,kBAAqB,CAACjS,KAAD,EAAsB;AAAA,QAAdqR,IAAc,uEAAP,EAAO;;AACpD,WAAOrR,MAAM+R,OAAb,EAAsB;AAClBV,aAAK7gB,IAAL,CAAUwP,KAAV;AACAA,gBAAQA,MAAM+R,OAAd;AACH;AACD,WAAOV,IAAP;AACH,CANM;;AAQA,IAAMnR,2BAA2B,SAA3BA,wBAA2B,CAACjB,WAAD,EAAca,UAAd,EAA0BoS,cAA1B,EAA0C/W,MAA1C,EAAqD;AACzF,QAAIkQ,iBAAJ;AACA,QAAI/K,kBAAJ;AAFyF,QAGjFZ,oBAHiF,GAGrCwS,cAHqC,CAGjFxS,oBAHiF;AAAA,QAG3DyS,iBAH2D,GAGrCD,cAHqC,CAG3DC,iBAH2D;;AAIzF,QAAM9S,sBAAsB6S,eAAe5S,QAA3C;AACA,QAAM8S,8BAA8BjX,OAAOiX,2BAA3C;AACA,QAAMC,WAAW,SAAXA,QAAW,CAACC,KAAD,EAAW;AACxB,YAAMhJ,SAASnO,OAAOkX,QAAP,IAAoB;AAAA,mBAAM,IAAN;AAAA,SAAnC;AACA,eAAO/I,OAAOgJ,KAAP,EAAcnX,MAAd,CAAP;AACH,KAHD;;AAKA,QAAIoX,YAAY,EAAhB;;AAEA,QAAItT,gBAAgB,IAAhB,IAAwB9D,OAAOqX,UAAP,KAAsB,IAAlD,EAAwD;AACpDD,oBAAY,CAAC
;AACTlH,sBAAU;AADD,SAAD,CAAZ;AAGAA,mBAAW,EAAX;AACH,KALD,MAKO;AAAA;;AACH,YAAIoH,kBAAkBriB,OAAOme,MAAP,CAAc7O,qBAAqBgT,cAAnC,CAAtB;AACA,YAAIP,sBAAsB,KAA1B,EAAiC;AAC7BM,8BAAkBA,gBAAgBnJ,MAAhB,CAAuB;AAAA,uBAAKhQ,EAAE6B,MAAF,CAASmE,QAAT,KAAsBD,mBAA3B;AAAA,aAAvB,CAAlB;AACH;;AAED,YAAMsT,mBAAmBF,gBAAgBnJ,MAAhB,CAAuB+I,QAAvB,EAAiC7iB,GAAjC,CAAqC;AAAA,mBAAUojB,OAAOzX,MAAP,CAAckQ,QAAxB;AAAA,SAArC,CAAzB;;AAEA,YAAMoG,gBAAgB,EAAtB;;AAEA,YAAIU,sBAAsB,KAA1B,EAAiC;AAC7B,gBAAMU,wBAAwBziB,OAAOme,MAAP,CAAc7O,qBAAqBgT,cAAnC,CAA9B;;AAEAG,kCAAsBviB,OAAtB,CAA8B,UAACwiB,SAAD,EAAe;AACzC,oBAAMC,aAAaD,UAAU3X,MAA7B;AACA,oBAAI4X,WAAWC,aAAX,KAA6B,KAA7B,IAAsCD,WAAWH,MAAX,KAAsBzX,OAAOyX,MAAnE,IACIG,WAAWzT,QAAX,KAAwBD,mBADhC,EACqD;AACjDoS,kCAAcjhB,IAAd,CAAmBsiB,UAAU9S,KAA7B;AACAqL,+BAAWwH,sBAAsBvJ,MAAtB,CAA6B;AAAA,+BAAKhQ,MAAMwZ,SAAX;AAAA,qBAA7B,EAAmDtjB,GAAnD,CAAuD;AAAA,+BAAK8J,EAAE6B,MAAF,CAASkQ,QAAd;AAAA,qBAAvD,CAAX;AACAA,6BAASza,MAAT,IAAmB2hB,UAAU/hB,IAAV,CAAe;AAC9B6a,0CAD8B;AAE9B4H,gCAAQH,UAAU9S,KAFY;AAG9BqR,8BAAMY,mBAAmBa,UAAU9S,KAA7B;AAHwB,qBAAf,CAAnB;AAKH;AACJ,aAZD;AAaH;;AAGDqL,mBAAW,aAAGpY,MAAH,2CAAiB0f,gBAAjB,IAAmC1T,WAAnC,IAAiDqK,MAAjD,CAAwD;AAAA,mBAAKhQ,MAAM,IAAX;AAAA,SAAxD,CAAX;AACAiZ,kBAAU/hB,IAAV,CAAe;AACX6a,8BADW;AAEXoG,qCAAmBA,aAAnB,qBAAqCtW,OAAOsW,aAAP,IAAwB,EAA7D;AAFW,SAAf;AAIH;;AAED,QAAMjS,YAAYM,WAAWE,KAA7B;;AAEA,QAAMb,aAAa/O,OAAOgH,MAAP,CAAc;AAC7B8b,2BAAmBjU,WADU;AAE7BI;AAF6B,KAAd,EAGhBlE,MAHgB,CAAnB;;AAKA,QAAMyE,mBAAmBE,WAAWC,YAApC;AACA,QAAIqS,+BAA+BxS,gBAAnC,EAAqD;AACjDU,oBAAY0N,uBAAuBpO,gBAAvB,EAAyCyL,QAAzC,EAAmD;AAC3D8C,6BAAiBiE;AAD0C,SAAnD,CAAZ;AAGAd,6BAAqB1R,gBAArB,EAAuCU,SAAvC,EAAkDnB,UAAlD;AACH;;AAEDoT,cAAUjiB,OAAV,CAAkB,UAAC6iB,GAAD,EAAS;AACvB,YAAMC,mBAAmBpF,uBAAuBxO,SAAvB,EAAkC2T,IAAI9H,QAAtC,CAAzB;AACA,YAAMgG,OAAO8B,IAAI9B,IAAjB;;AAEA,YAAIA,IAAJ,EAAU;AACN,gBAAMvC,gBAAgBsC,iBAAiBgC,gBAAjB,EAAmC/B,KAAKgC,OAAL,EAAnC,CAAtB;AACAF,gBAAIF,MAAJ,CAAWtB,iBAAX,CAA6B7C,aAA7B,EAA4C3P,UAA5C;AACH,SAHD,MAGO;AACHmS,iCAAqB9R,SAArB,EAAgC4T,gBAAhC,EAAkDjU,UAAlD,EAA8D;AAC1DsS,+BAAe0B,IAAI
1B,aADuC;AAE1DD,oCAAoBY,+BAA+BxS;AAFO,aAA9D;AAIH;AACJ,KAbD;AAcH,CAnFM;;AAqFA,IAAMO,4BAA4B,SAA5BA,yBAA4B,CAACT,oBAAD,EAAuBI,UAAvB,EAAmCoS,cAAnC,EAAsD;AAC3F,QAAMoB,mBAAmB5T,qBAAqB4T,gBAA9C;;AAEA,SAAK,IAAMV,MAAX,IAAqBU,gBAArB,EAAuC;AACnC,YAAMR,YAAYQ,iBAAiBV,MAAjB,CAAlB;AACA,YAAMG,aAAaD,UAAU3X,MAA7B;AACA,YAAMkE,sBAAsB6S,eAAe/W,MAAf,CAAsBmE,QAAlD;AACA,YAAMiU,wBAAwBrB,eAAe/S,UAAf,CAA0BoU,qBAA1B,GAC1BrB,eAAe/S,UAAf,CAA0BoU,qBAA1B,CAAgDR,UAAhD,EAA4Db,eAAe/W,MAA3E,CAD0B,GAC2D,IADzF;AAEA,YAAI4X,WAAWzT,QAAX,KAAwBD,mBAAxB,IAA+CkU,qBAAnD,EAA0E;AACtE,gBAAMC,gBAAgBT,WAAW1H,QAAjC;AACAnL,qCAAyBsT,aAAzB,EAAwC1T,UAAxC,EAAoD;AAChDJ,0DADgD;AAEhDyS,mCAAmB,KAF6B;AAGhD7S,0BAAUD;AAHsC,aAApD,EAIG0T,UAJH;AAKH;AACJ;AACJ,CAlBM;;AAoBA,IAAM9S,qBAAqB,SAArBA,kBAAqB,CAACP,oBAAD,EAA8C;AAAA,QAAvBvE,MAAuB,uEAAd,EAAc;AAAA,QAAV6E,KAAU;;AAC5E,QAAIyT,wBAAJ;AACA,QAAMrU,kBAAkBjE,OAAOiE,eAA/B;AACA,QAAMiM,WAAWlQ,OAAOkQ,QAAxB;AACA,QAAMzd,MAASuN,OAAOyX,MAAhB,SAA0BzX,OAAOmE,QAAvC;;AAEA,QAAIF,eAAJ,EAAqB;AACjBqU,0BAAkB/T,qBAAqBgT,cAAvC;AACH,KAFD,MAEO;AACHe,0BAAkB/T,qBAAqB4T,gBAAvC;AACH;;AAED,QAAIjI,aAAa,IAAjB,EAAuB;AACnB,eAAOoI,gBAAgB7lB,GAAhB,CAAP;AACH,KAFD,MAEO;AACH6lB,wBAAgB7lB,GAAhB,IAAuB;AACnBoS,wBADmB;AAEnB7E;AAFmB,SAAvB;AAIH;;AAED,WAAO,KAAP;AACH,CAtBM;;AAyBA,IAAMiH,yBAAyB,SAAzBA,sBAAyB,CAACsN,SAAD,EAAYzN,SAAZ,EAAuBD,WAAvB,EAAuC;AACzE,QAAM0R,sBAAsBhE,UAAUjY,MAAV,CAAiB,UAACC,GAAD,EAAMG,KAAN,EAAgB;AACzD,YAAIA,MAAMqE,WAAN,CAAkBzM,IAAlB,KAA2B,QAA/B,EAAyC;AACrCiI,gBAAIlH,IAAJ,+BAAYyR,UAAUqH,MAAV,CAAiB;AAAA,uBAAa3M,UAAUgX,MAAV,CAAiB9b,KAAjB,MAA4B,CAAC,CAA1C;AAAA,aAAjB,CAAZ;AACH,SAFD,MAEO,IAAIA,SAASmK,WAAb,EAA0B;AAC7BtK,gBAAIlH,IAAJ,CAASqH,KAAT;AACH;AACD,eAAOH,GAAP;AACH,KAP2B,EAOzB,EAPyB,CAA5B;AAQA,WAAO7G,MAAM+iB,IAAN,CAAW,IAAI1N,GAAJ,CAAQwN,mBAAR,CAAX,EAAyClkB,GAAzC,CAA6C;AAAA,eAASqI,MAAM/J,IAAN,EAAT;AAAA,KAA7C,CAAP;AACH,CAVM;;AAYP;;;;;;;AAOO,IAAM+lB,wBAAwB,SAAxBA,qBAAwB,CAAChc,KAAD,EAAQhK,KAAR,EAAkB;AACnD,QAAIgK,MAAMmQ,YAAV,EAAwB;AACpB,eAAOnQ,MAAMmQ,YAAN,GAAqBna,KAArB,CAAP;AACH;AACD,WAAOA,KAAP;AACH,CALM,C;;;;;;;;;
;;AC9rBP,IAAM2K,YAAYsb,mBAAOA,CAAC,iCAAR,CAAlB;;AAEAC,OAAOC,OAAP,GAAiBxb,UAAUyb,OAAV,GAAoBzb,UAAUyb,OAA9B,GAAwCzb,SAAzD,C;;;;;;;;;;;;;;;;;ACFA;;;;;;IAMM8J,iB;;;;AACF;;;;;;;yCAOyBnH,M,EAAQ;AAC7B,gBAAI,CAACA,MAAL,EAAa;AACT,uBAAOmH,kBAAkB4R,oBAAzB;AACH;AACD,mBAAO9jB,OAAOgH,MAAP,CAAckL,kBAAkB4R,oBAAhC,EAAsD/Y,MAAtD,CAAP;AACH;;AAED;;;;;;;;;AAMA,+BAAatN,KAAb,EAAoB;AAAA;;AAChB,aAAKsmB,MAAL,GAActmB,KAAd;AACH;;AAED;;;;;;;;;;gCAMS;AACL,mBAAO,KAAKsmB,MAAZ;AACH;;AAED;;;;;;;;;mCAMY;AACR,mBAAO/L,OAAO,KAAK+L,MAAZ,CAAP;AACH;;;kCAEgBjM,G,EAAK;AAClB,mBAAQA,eAAe5F,iBAAhB,IAAsC,CAAC,CAACA,kBAAkBC,gBAAlB,GAAqC2F,GAArC,CAA/C;AACH;;;uCAEqBA,G,EAAK;AACvB,mBAAOA,eAAe5F,iBAAf,GAAmC4F,GAAnC,GAAyC5F,kBAAkBC,gBAAlB,GAAqC2F,GAArC,CAAhD;AACH;;;;;;AAGL;;;;;AAGA5F,kBAAkB8R,IAAlB,GAAyB,IAAI9R,iBAAJ,CAAsB,MAAtB,CAAzB;AACAA,kBAAkBoG,EAAlB,GAAuB,IAAIpG,iBAAJ,CAAsB,IAAtB,CAAvB;AACAA,kBAAkB+R,GAAlB,GAAwB,IAAI/R,iBAAJ,CAAsB,KAAtB,CAAxB;;AAEA;;;;;AAKAA,kBAAkB4R,oBAAlB,GAAyC;AACrCI,aAAShS,kBAAkBoG,EADU;AAErC6L,SAAKjS,kBAAkB+R,GAFc;AAGrCG,UAAMlS,kBAAkB8R,IAHa;AAIrC7Z,eAAW+H,kBAAkBoG;AAJQ,CAAzC;;AAOepG,gFAAf,E;;;;;;;;;;;;;;;;;;AC/EA;AACA;;AAEA,IAAMmS,kBAAkB,SAAlBA,eAAkB,CAACC,OAAD,EAAU7Z,KAAV,EAAiBC,GAAjB,EAAyB;AAC7C,QAAM6Z,UAAU,EAAhB;AACA,QAAI5a,OAAOc,KAAX;;AAEA,WAAOd,OAAOe,GAAd,EAAmB;AACf6Z,gBAAQnkB,IAAR,CAAauJ,IAAb;AACAA,gBAAQ2a,OAAR;AACH;AACDC,YAAQnkB,IAAR,CAAauJ,IAAb;;AAEA,WAAO4a,OAAP;AACH,CAXD;;AAaA,IAAMC,kBAAkB,SAAlBA,eAAkB,CAACC,YAAD,EAAehnB,KAAf,EAAyB;AAC7C,QAAIinB,UAAU,CAAd;AACA,QAAIC,WAAWF,aAAajkB,MAAb,GAAsB,CAArC;AACA,QAAIokB,eAAJ;AACA,QAAI1G,cAAJ;;AAEA;AACA,WAAOwG,WAAWC,QAAlB,EAA4B;AACxBC,iBAASF,UAAUhL,KAAKmL,KAAL,CAAW,CAACF,WAAWD,OAAZ,IAAuB,CAAlC,CAAnB;AACAxG,gBAAQuG,aAAaG,MAAb,CAAR;;AAEA,YAAInnB,SAASygB,MAAMzT,KAAf,IAAwBhN,QAAQygB,MAAMxT,GAA1C,EAA+C;AAC3C,mBAAOwT,KAAP;AACH,SAFD,MAEO,IAAIzgB,SAASygB,MAAMxT,GAAnB,EAAwB;AAC3Bga,sBAAUE,SAAS,CAAnB;AACH,SAFM,MAEA,IAAInnB,QAAQygB,MAAMzT,KAAlB,EAAyB;AAC5Bka,uBAAWC,SAAS,CAApB;AACH;AACJ;;AAED,WAAO,IAAP;AACH,CArBD;;AAuBC;;;;;;;;AAQM,SAASpU,qBAAT,CAAgCD,YA
AhC,EAA8CnG,UAA9C,EAA0DW,MAA1D,EAAkE;AAAA,QAC/DwZ,OAD+D,GACnBxZ,MADmB,CAC/DwZ,OAD+D;AAAA,QACtDO,SADsD,GACnB/Z,MADmB,CACtD+Z,SADsD;AAAA,QAC3CR,OAD2C,GACnBvZ,MADmB,CAC3CuZ,OAD2C;AAAA,QAClC7Z,KADkC,GACnBM,MADmB,CAClCN,KADkC;AAAA,QAC3BC,GAD2B,GACnBK,MADmB,CAC3BL,GAD2B;;AAAA,+BAEhD6F,aAAawF,MAAb,EAFgD;AAAA;AAAA,QAE9DgP,IAF8D;AAAA,QAExDC,IAFwD;;AAIrE,QAAI,CAACT,OAAL,EAAc;AACV9Z,gBAASA,UAAU,CAAV,KAAgB,CAACA,KAAD,IAAUA,QAAQsa,IAAlC,CAAD,GAA4CA,IAA5C,GAAmDta,KAA3D;AACAC,cAAOA,QAAQ,CAAR,KAAc,CAACA,GAAD,IAAQA,MAAMsa,IAA5B,CAAD,GAAuCA,OAAO,CAA9C,GAAmDta,GAAzD;;AAEA,YAAIoa,SAAJ,EAAe;AACXR,sBAAU5K,KAAKuL,IAAL,CAAUvL,KAAKwL,GAAL,CAASxa,MAAMD,KAAf,IAAwBqa,SAAlC,CAAV;AACH;;AAEDP,kBAAUF,gBAAgBC,OAAhB,EAAyB7Z,KAAzB,EAAgCC,GAAhC,CAAV;AACH;;AAED,QAAI6Z,QAAQ,CAAR,IAAaQ,IAAjB,EAAuB;AACnBR,gBAAQY,OAAR,CAAgBJ,IAAhB;AACH;AACD,QAAIR,QAAQA,QAAQ/jB,MAAR,GAAiB,CAAzB,KAA+BwkB,IAAnC,EAAyC;AACrCT,gBAAQnkB,IAAR,CAAa4kB,OAAO,CAApB;AACH;;AAED,QAAMP,eAAe,EAArB;AACA,SAAK,IAAInlB,IAAI,CAAb,EAAgBA,IAAIilB,QAAQ/jB,MAAR,GAAiB,CAArC,EAAwClB,GAAxC,EAA6C;AACzCmlB,qBAAarkB,IAAb,CAAkB;AACdqK,mBAAO8Z,QAAQjlB,CAAR,CADO;AAEdoL,iBAAK6Z,QAAQjlB,IAAI,CAAZ;AAFS,SAAlB;AAIH;;AAED,QAAMmR,aAAa,EAAnB;AACAhC,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM2K,QAAQsG,aAAa3D,YAAb,CAA0BhH,IAA1B,CAA+BtG,CAA/B,CAAd;AACA,YAAI2K,iBAAiBiI,4DAArB,EAAwC;AACpCzB,uBAAWrQ,IAAX,CAAgB6J,KAAhB;AACA;AACH;;AAED,YAAMiU,QAAQsG,gBAAgBC,YAAhB,EAA8Bxa,KAA9B,CAAd;AACAwG,mBAAWrQ,IAAX,CAAmB8d,MAAMzT,KAAzB,SAAkCyT,MAAMxT,GAAxC;AACH,KATD;;AAWA,WAAO,EAAE+F,sBAAF,EAAcC,MAAM6T,OAApB,EAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;AC1FD;AACA;;AAEA;;;;;;;;;;;;;;AAcA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CO,IAAM9Q,SAAS,SAATA,MAAS;AAAA,sCAAIpL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG3R,MAAH,WAAapL,IAAb,CAAN;AAAA,KAAb;AAAA,CAAf;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BO,IAAMqL,UAAU,SAAVA,OAAU;AAAA,uCAAIrL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG1R,OAAH,WAAcrL,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;AAyDO,IAAMmL,MAAM,SAANA,GAAM;AAAA,uCAAInL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAG5R,GAAH,WAAUnL,IAAV,CAAN;AAAA,KAAb;AAAA,CAAZ;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BO,IAAM+C,UAAU,SAAVA,OAAU;AAAA,uCAAI/C,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAM+c,GAAGha,OAAH,WAAc/C,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCO,IAAMkL,UAAU,SAAVA,OAAU;AAAA,uCAAI8R,UAAJ;AAAIA,kBAAJ;AAAA;;AAAA,WACnB,UAACD,EAAD,EAAsC;AAAA,YAAjCra,MAAiC,uEAAxB,EAAEC,WAAW,IAAb,EAAwB;;AAClC,YAAIsa,YAAYF,EAAhB;AACA,YAAIG,mBAAJ;AACA,YAAM1E,cAAc,EAApB;;AAEAwE,mBAAWnlB,OAAX,CAAmB,UAAC0a,SAAD,EAAe;AAC9B0K,wBAAY1K,UAAU0K,SAAV,CAAZ;AACAzE,wBAAYzgB,IAAZ,uCAAoBklB,UAAUxK,WAA9B;AACA,gBAAI,CAACyK,UAAL,EAAiB;AACbA,6BAAaD,SAAb;AACH;AACJ,SAND;;AAQA,YAAIC,cAAcA,eAAeD,SAAjC,EAA4C;AACxCC,uBAAWC,OAAX;AACH;;AAED;AACAF,kBAAUjK,mBAAV,GAAgC,EAAhC;AACAhQ,0EAAkBA,CACd+Z,EADJ,EAEIE,SAFJ,EAGI/hB,yDAAcA,CAACI,OAHnB,EAII,IAJJ,EAKIkd,WALJ;;AAQA,YAAI9V,OAAOC,SAAX,EAAsB;AAClBsa,sBAAU9Z,SAAV,CAAoB4Z,EAApB;AACH,SAFD,MAEO;AACHE,sBAAU9Z,SAAV,CAAoB,IAApB;AACH;;AAED,eAAO8Z,SAAP;AACH,KAnCkB;AAAA,CAAhB,C;;;;;;;;;;;;ACvNP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AAKA,SAASG,eAAT,GAA2B;AAAE,WAAO,IAAP;AAAc;;AAE3C;;;;;;;;;;AAUO,SAAS7R,YAAT,CAAuB8R,GAAvB,EAA4BC,GAA5B,EAAiC1D,QAAjC,EAAgG;AAAA,QAArD2D,mBAAqD,uEAA/B,KAA+B;AAAA,QAAxBC,QAAwB,uEAAb9hB,gDAAKA,CAACC,KAAO;;AACnG,QAAM6B,SAAS,EAAf;AACA,QAAMD,OAAO,EAAb;AACA,QAAMkgB,qBAAqB7D,YAAYwD,eAAvC;AACA,QAAMM,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAMia,oBAAoBF,cAAc1mB,IAAxC;AACA,QAAM6mB,oBAAoBF,cAAc3mB,IAAxC;AACA,QAAMA,OAAU0mB,cAAc1mB,IAAxB,SAAgC2mB,cAAc3mB,IAApD;AACA,QAAM8mB,mBAAmBC,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAzB;;AAEA,QAAIC,sBAAsBC,iBAA1B,EAA6C;AACzC,cAAM,IAAI5f,KAAJ,CAAU,4CAAV,CAAN;AACH;AACD;AACAyf,kBAAcve,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAM4e,YAAY1G,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAIsgB,iBAAiBtc,OAAjB,CAAyBwc,UAAUhnB,IAAnC,MAA6C,CAAC,CAA9C,IAAmD,CAACumB,mBAAx
D,EAA6E;AACzES,sBAAUhnB,IAAV,GAAoB0mB,cAAc1mB,IAAlC,SAA0CgnB,UAAUhnB,IAApD;AACH;AACDwG,eAAOzF,IAAP,CAAYimB,SAAZ;AACH,KAND;AAOAL,kBAAcxe,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAM4e,YAAY1G,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAIsgB,iBAAiBtc,OAAjB,CAAyBwc,UAAUhnB,IAAnC,MAA6C,CAAC,CAAlD,EAAqD;AACjD,gBAAI,CAACumB,mBAAL,EAA0B;AACtBS,0BAAUhnB,IAAV,GAAoB2mB,cAAc3mB,IAAlC,SAA0CgnB,UAAUhnB,IAApD;AACAwG,uBAAOzF,IAAP,CAAYimB,SAAZ;AACH;AACJ,SALD,MAKO;AACHxgB,mBAAOzF,IAAP,CAAYimB,SAAZ;AACH;AACJ,KAVD;;AAYA;AACA5X,oFAAkBA,CAACiX,IAAIzc,WAAvB,EAAoC,UAAC3J,CAAD,EAAO;AACvC,YAAIgnB,WAAW,KAAf;AACA,YAAIC,oBAAJ;AACA9X,wFAAkBA,CAACkX,IAAI1c,WAAvB,EAAoC,UAACud,EAAD,EAAQ;AACxC,gBAAMC,QAAQ,EAAd;AACA,gBAAMC,UAAU,EAAhB;AACAA,oBAAQT,iBAAR,IAA6B,EAA7B;AACAS,oBAAQR,iBAAR,IAA6B,EAA7B;AACAH,0BAAcve,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpCgf,sBAAMrmB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAX;AACAonB,wBAAQT,iBAAR,EAA2Bxe,MAAMpI,IAAN,EAA3B,IAA2C;AACvC8a,8BAAU1S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAD6B;AAEvC4a,oCAAgBzS,MAAMyE,aAAN,GAAsB5M,CAAtB;AAFuB,iBAA3C;AAIH,aAND;AAOA0mB,0BAAcxe,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,oBAAI,EAAE0e,iBAAiBtc,OAAjB,CAAyBpC,MAAM5B,MAAN,GAAexG,IAAxC,MAAkD,CAAC,CAAnD,IAAwDumB,mBAA1D,CAAJ,EAAoF;AAChFa,0BAAMrmB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwB4gB,EAAxB,CAAX;AACH;AACDE,wBAAQR,iBAAR,EAA2Bze,MAAMpI,IAAN,EAA3B,IAA2C;AACvC8a,8BAAU1S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwB4gB,EAAxB,CAD6B;AAEvCtM,oCAAgBzS,MAAMyE,aAAN,GAAsBsa,EAAtB;AAFuB,iBAA3C;AAIH,aARD;;AAUA,gBAAInY,cAAc,EAAlB;AACA,gBAAIsY,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAInX,YAAJ,EAAN;AAAA,aAArB;AACA,gBAAIqY,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAIpX,YAAJ,EAAN;AAAA,aAArB;;AAEA,gBAAMsY,YAAY5M,+DAAeA,CAACyM,QAAQT,iBAAR,CAAhB,CAAlB;AACA,gBAAMa,YAAY7M,+DAAeA,CAACyM,QAAQR,iBAAR,CAAhB,CAAlB;AACA,gBAAIJ,mBAAmBe,SAAnB,EAA8BC,SAA9B,EAAyCH,cAAzC,EAAyDC,cAAzD,EAAyEvY,WAAzE,CAAJ,EAA2F;AACvF,oBAAM0Y,WAAW,EAAjB;AACAN,sBAAMvmB,OAAN,CAAc,UAAC8mB,OAAD,EAAUC,GAAV,EAAkB;AAC5BF,6BAAS
lhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B2nB,OAA7B;AACH,iBAFD;AAGA,oBAAIV,YAAYviB,gDAAKA,CAACC,KAAN,KAAgB6hB,QAAhC,EAA0C;AACtCjgB,yBAAK2gB,WAAL,IAAoBQ,QAApB;AACH,iBAFD,MAGK;AACDnhB,yBAAKxF,IAAL,CAAU2mB,QAAV;AACAT,+BAAW,IAAX;AACAC,kCAAcjnB,CAAd;AACH;AACJ,aAbD,MAaO,IAAI,CAACumB,aAAa9hB,gDAAKA,CAACE,SAAnB,IAAgC4hB,aAAa9hB,gDAAKA,CAACG,UAApD,KAAmE,CAACoiB,QAAxE,EAAkF;AACrF,oBAAMS,YAAW,EAAjB;AACA,oBAAI7J,MAAM6I,cAAcve,MAAd,CAAqBhH,MAArB,GAA8B,CAAxC;AACAimB,sBAAMvmB,OAAN,CAAc,UAAC8mB,OAAD,EAAUC,GAAV,EAAkB;AAC5B,wBAAIA,OAAO/J,GAAX,EAAgB;AACZ6J,kCAASlhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B2nB,OAA7B;AACH,qBAFD,MAGK;AACDD,kCAASlhB,OAAOohB,GAAP,EAAY5nB,IAArB,IAA6B,IAA7B;AACH;AACJ,iBAPD;AAQAinB,2BAAW,IAAX;AACAC,8BAAcjnB,CAAd;AACAsG,qBAAKxF,IAAL,CAAU2mB,SAAV;AACH;AACJ,SAxDD;AAyDH,KA5DD;;AA8DA,WAAO,IAAI3e,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;ACzHD;AACA;;AAEA;;;;;;;;;;;AAWO,SAAS0J,WAAT,CAAsBgM,UAAtB,EAAkC3K,UAAlC,EAA8CkQ,aAA9C,EAA6D7O,cAA7D,EAA6E3F,OAA7E,EAAsF;AACzF,QAAMyC,aAAa;AACfc,gBAAQ,KADO;AAEfD,oBAAY;AAFG,KAAnB;AAIAtD,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;;AAEA,QAAMohB,SAAS;AACXrhB,gBAAQ,EADG;AAEXD,cAAM,EAFK;AAGX0D,cAAM;AAHK,KAAf;AAKA,QAAMD,SAASvD,QAAQuD,MAAvB;AACA,QAAM8d,aAAa1b,kBAAkBA,eAAejL,MAAf,GAAwB,CAA7D;AACA;AACA,QAAM4mB,aAAa,EAAnB;AACA;AACA,QAAMC,UAAU/M,cAAc/P,KAAd,CAAoB,GAApB,CAAhB;;AAEA8c,YAAQnnB,OAAR,CAAgB,UAAConB,OAAD,EAAa;AACzB,aAAK,IAAIhoB,IAAI,CAAb,EAAgBA,IAAIyV,WAAWvU,MAA/B,EAAuClB,KAAK,CAA5C,EAA+C;AAC3C,gBAAIyV,WAAWzV,CAAX,EAAcD,IAAd,OAAyBioB,OAA7B,EAAsC;AAClCF,2BAAWhnB,IAAX,CAAgB2U,WAAWzV,CAAX,CAAhB;AACA;AACH;AACJ;AACJ,KAPD;;AASA;AACA8nB,eAAWlnB,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B;AACAyf,eAAOrhB,MAAP,CAAczF,IAAd,CAAmBqH,MAAM5B,MAAN,EAAnB;AACH,KAHD;;AAKA,QAAIwD,MAAJ,EAAY;AACR6d,eAAOrhB,MAAP,CAAczF,IAAd,CAAmB;AACff,kBAAM,KADS;AAEf8F,kBAAM;AAFS,SAAnB;AAIH;;AAEDsJ,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC4nB,eAAOthB,IAAP,CAAYxF,IAAZ,CAAiB,EAAjB;AACA,YAAMmnB,YAAYL,OAAOthB,IAAP,CAAYpF,MAAZ,GAAqB,
CAAvC;AACA,YAAIiK,QAAQ,CAAZ;AACA2c,mBAAWlnB,OAAX,CAAmB,UAACuH,KAAD,EAAQ+e,EAAR,EAAe;AAC9BU,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBf,KAAK/b,KAA5B,IAAqChD,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAArC;AACH,SAFD;AAGA,YAAI+J,MAAJ,EAAY;AACR6d,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBH,WAAW5mB,MAAlC,IAA4ClB,CAA5C;AACH;AACD;AACA4nB,eAAO5d,IAAP,CAAYlJ,IAAZ,CAAiBd,CAAjB;;AAEA;AACA;AACA,YAAI6nB,UAAJ,EAAgB;AAAED,mBAAOthB,IAAP,CAAY2hB,SAAZ,EAAuBnnB,IAAvB,CAA4Bd,CAA5B;AAAiC;AACtD,KAhBD;;AAkBA;AACA,QAAI6nB,UAAJ,EAAgB;AACZK,8DAAQA,CAACN,MAAT,EAAiBzb,cAAjB;AACH;;AAED,QAAI3F,QAAQsD,UAAZ,EAAwB;AACpB,YAAMqe,UAAUhnB,0CAASA,MAAMymB,OAAOrhB,MAAP,CAAcrF,MAApB,CAAT,GAAsCpB,GAAtC,CAA0C;AAAA,mBAAM,EAAN;AAAA,SAA1C,CAAhB;AACA8nB,eAAOthB,IAAP,CAAY1F,OAAZ,CAAoB,UAACumB,KAAD,EAAW;AAC3BA,kBAAMvmB,OAAN,CAAc,UAAC0F,IAAD,EAAOtG,CAAP,EAAa;AACvBmoB,wBAAQnoB,CAAR,EAAWc,IAAX,CAAgBwF,IAAhB;AACH,aAFD;AAGH,SAJD;AAKAshB,eAAOthB,IAAP,GAAc6hB,OAAd;AACH;;AAED,WAAOP,MAAP;AACH,C;;;;;;;;;;;;ACzFD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;;;AASO,SAASrT,UAAT,CAAqB6R,GAArB,EAA0BC,GAA1B,EAA+B;AAClC,QAAM+B,YAAY,EAAlB;AACA,QAAM7hB,SAAS,EAAf;AACA,QAAM8hB,gBAAgB,EAAtB;AACA,QAAM/hB,OAAO,EAAb;AACA,QAAMmgB,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAM4b,wBAAwB7B,cAAclZ,SAAd,EAA9B;AACA,QAAMgb,wBAAwB7B,cAAcnZ,SAAd,EAA9B;AACA,QAAMxN,OAAU0mB,cAAc1mB,IAAxB,eAAsC2mB,cAAc3mB,IAA1D;;AAED;AACC,QAAI,CAACyoB,gEAAUA,CAACpC,IAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiD+c,IAAIxc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACC8c,QAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQmgB,sBAAsBrb,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAZ;AACA8hB,sBAAcvnB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;;AAOA,aAAS0oB,iBAAT,CAA2B3C,EAA3B,EAA+BvY,SAA/B,EAA0Cmb,OAA1C,EAAmD;AAC/CvZ,wFAAkBA,CAAC2W,GAAGnc,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAMmnB,QAAQ,EAAd;AACA,gBAA
IwB,WAAW,EAAf;AACAN,0BAAcznB,OAAd,CAAsB,UAACgoB,UAAD,EAAgB;AAClC,oBAAMzqB,QAAQoP,UAAUqb,UAAV,EAAsBtb,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACA2oB,kCAAgBxqB,KAAhB;AACAgpB,sBAAMyB,UAAN,IAAoBzqB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACiqB,UAAUO,QAAV,CAAL,EAA0B;AACtB,oBAAID,OAAJ,EAAa;AAAEpiB,yBAAKxF,IAAL,CAAUqmB,KAAV;AAAmB;AAClCiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAkBpC,GAAlB,EAAuBkC,qBAAvB,EAA8C,KAA9C;AACAE,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB,EAA8C,IAA9C;;AAEA,WAAO,IAAIxf,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;ACjED;AAAA;AAAA;;;;;;;;AAQO,SAAS+mB,eAAT,CAA0B+B,GAA1B,EAA+BC,GAA/B,EAAoC;AACvC,QAAMC,SAAS,EAAf;AACA,QAAMC,SAAS,EAAf;AACAH,QAAI3gB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B6gB,eAAOloB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH,KAFD;AAGA+oB,QAAI5gB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B,YAAI6gB,OAAOze,OAAP,CAAepC,MAAM5B,MAAN,GAAexG,IAA9B,MAAwC,CAAC,CAA7C,EAAgD;AAC5CgpB,mBAAOjoB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH;AACJ,KAJD;AAKA,WAAOgpB,MAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;;;;ACpBD;AACA;AACA;;IAEQzV,G,GAAgDD,yD,CAAhDC,G;IAAKC,G,GAA2CF,yD,CAA3CE,G;IAAKG,K,GAAsCL,yD,CAAtCK,K;IAAOC,I,GAA+BN,yD,CAA/BM,I;IAAMC,K,GAAyBP,yD,CAAzBO,K;IAAOC,G,GAAkBR,yD,CAAlBQ,G;IAAKL,G,GAAaH,yD,CAAbG,G;IAAKC,G,GAAQJ,yD,CAARI,G;;;AAEhD,SAASwV,iBAAT,CAA2B7hB,GAA3B,EAAgC;AAC5B,WAAOA,IAAIwS,MAAJ,CAAW;AAAA,eAAQ,EAAEhR,gBAAgBgK,4DAAlB,CAAR;AAAA,KAAX,CAAP;AACH;AACD;;;;;;;AAOA,SAASsW,GAAT,CAAc9hB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMgoB,iBAAiBF,kBAAkB7hB,GAAlB,CAAvB;AACA,YAAMgiB,WAAWD,eAAejoB,MAAf,GACGioB,eAAephB,MAAf,CAAsB,UAACC,GAAD,EAAMqhB,IAAN;AAAA,mBAAerhB,MAAMqhB,IAArB;AAAA,SAAtB,EAAiD,CAAjD,CADH,GAEKzW,4DAAiBA,CAAC8R,IAFxC;AAGA,eAAO0E,QAAP;AACH;AACD,WAAOxW,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS4E,GAAT,CAAcliB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMioB,WAAWF,IAAI9hB,GAAJ,CAAjB;AACA,YAAMwW,MAAMxW,IAAIlG,M
AAJ,IAAc,CAA1B;AACA,eAAQgK,OAAO3M,KAAP,CAAa6qB,QAAb,KAA0BA,oBAAoBxW,4DAA/C,GACEA,4DAAiBA,CAAC8R,IADpB,GAC2B0E,WAAWxL,GAD7C;AAEH;AACD,WAAOhL,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS7N,GAAT,CAAczP,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAMooB,iBAAiBN,kBAAkB7hB,GAAlB,CAAvB;;AAEA,eAAQmiB,eAAeroB,MAAhB,GAA0BkZ,KAAKvD,GAAL,gCAAY0S,cAAZ,EAA1B,GAAwD3W,4DAAiBA,CAAC8R,IAAjF;AACH;AACD,WAAO9R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS3N,GAAT,CAAc3P,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAMooB,iBAAiBN,kBAAkB7hB,GAAlB,CAAvB;;AAEA,eAAQmiB,eAAeroB,MAAhB,GAA0BkZ,KAAKrD,GAAL,gCAAYwS,cAAZ,EAA1B,GAAwD3W,4DAAiBA,CAAC8R,IAAjF;AACH;AACD,WAAO9R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS8E,KAAT,CAAgBpiB,GAAhB,EAAqB;AACjB,WAAOA,IAAI,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAASqiB,IAAT,CAAeriB,GAAf,EAAoB;AAChB,WAAOA,IAAIA,IAAIlG,MAAJ,GAAa,CAAjB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASwoB,KAAT,CAAgBtiB,GAAhB,EAAqB;AACjB,QAAIC,sDAAOA,CAACD,GAAR,CAAJ,EAAkB;AACd,eAAOA,IAAIlG,MAAX;AACH;AACD,WAAO0R,4DAAiBA,CAAC8R,IAAzB;AACH;;AAED;;;;;;AAMA,SAASiF,QAAT,CAAmBviB,GAAnB,EAAwB;AACpB,QAAIwiB,OAAON,IAAIliB,GAAJ,CAAX;AACA,WAAOkiB,IAAIliB,IAAItH,GAAJ,CAAQ;AAAA,wBAAQ+pB,MAAMD,IAAd,EAAuB,CAAvB;AAAA,KAAR,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,GAAT,CAAc1iB,GAAd,EAAmB;AACf,WAAOgT,KAAK2P,IAAL,CAAUJ,SAASviB,GAAT,CAAV,CAAP;AACH;;AAGD,IAAM4iB,iDACD1W,GADC,EACK4V,GADL,4BAED3V,GAFC,EAEK+V,GAFL,4BAGD9V,GAHC,EAGKqD,GAHL,4BAIDpD,GAJC,EAIKsD,GAJL,4BAKDrD,KALC,EAKO8V,KALP,4BAMD7V,IANC,EAMM8V,IANN,4BAOD7V,KAPC,EAOO8V,KAPP,4BAQD7V,GARC,EAQKiW,GARL,WAAN;;AAWA,IAAMzR,qBAAqB/E,GAA3B;;;;;;;;;;;;;;;;;;;;;;;;;;ACpJA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAAS2W,WAAT,CAAsB3I,SAAtB,EAAiC3L,QAAjC,EAA2C;AACvC,QAAMoT,SAAS,EAAf;AACA,QAAMtT,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAMwd,aAAazU,WAAWO,YAAX,EAAnB;;AAEAtV,WAAOga,OAAP,CAAewP,UAAf,EAA2BtpB,OAA3B,CAAmC,gBAAW;AAAA;AAAA,YAAT1C,GAAS;;AAC1C,YAAIyX,YAAYA,SAASzU,MAAzB,EA
AiC;AAC7B,gBAAIyU,SAASpL,OAAT,CAAiBrM,GAAjB,MAA0B,CAAC,CAA/B,EAAkC;AAC9B6qB,uBAAOjoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,SAJD,MAIO;AACH6qB,mBAAOjoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,KARD;;AAUA,WAAO6qB,MAAP;AACH;;AAED;;;;;;;AAOA,SAASoB,aAAT,CAAwB7I,SAAxB,EAAkD;AAAA,QAAf9V,QAAe,uEAAJ,EAAI;;AAC9C,QAAMoc,SAAS,EAAf;AACA,QAAMnS,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAM0d,WAAW3U,WAAWK,UAAX,EAAjB;AACA,QAAMuU,aAAape,4DAAYA,CAACD,cAAb,EAAnB;;AAEAtL,WAAOmI,IAAP,CAAYuhB,QAAZ,EAAsBxpB,OAAtB,CAA8B,UAAC0pB,WAAD,EAAiB;AAC3C,YAAI,OAAO9e,SAAS8e,WAAT,CAAP,KAAiC,QAArC,EAA+C;AAC3C9e,qBAAS8e,WAAT,IAAwBF,SAASE,WAAT,EAAsBlS,QAAtB,EAAxB;AACH;AACD,YAAMtG,YAAY7F,4DAAYA,CAACse,OAAb,CAAqB/e,SAAS8e,WAAT,CAArB,CAAlB;AACA,YAAIxY,SAAJ,EAAe;AACX8V,mBAAO0C,WAAP,IAAsBxY,SAAtB;AACH,SAFD,MAEO;AACH8V,mBAAO0C,WAAP,IAAsBD,UAAtB;AACA7e,qBAAS8e,WAAT,IAAwBjS,qEAAxB;AACH;AACJ,KAXD;AAYA,WAAOuP,MAAP;AACH;;AAED;;;;;;;;;AASA,SAAS9b,OAAT,CAAkBwV,SAAlB,EAA6B3L,QAA7B,EAAuCnK,QAAvC,EAAiDgf,iBAAjD,EAAoE;AAChE,QAAMC,YAAYR,YAAY3I,SAAZ,EAAuB3L,QAAvB,CAAlB;AACA,QAAM+U,aAAaP,cAAc7I,SAAd,EAAyB9V,QAAzB,CAAnB;AACA,QAAMiK,aAAa6L,UAAU5U,aAAV,EAAnB;AACA,QAAMuQ,gBAAgBxH,WAAWlI,SAAX,EAAtB;AACA,QAAMod,SAASlV,WAAW1V,IAA1B;AACA,QAAM8R,eAAe,EAArB;AACA,QAAM+Y,aAAa,EAAnB;AACA,QAAMrkB,SAAS,EAAf;AACA,QAAMskB,UAAU,EAAhB;AACA,QAAMvkB,OAAO,EAAb;AACA,QAAIuF,qBAAJ;;AAEA;AACAnL,WAAOga,OAAP,CAAeuC,aAAf,EAA8Brc,OAA9B,CAAsC,iBAAkB;AAAA;AAAA,YAAhB1C,GAAgB;AAAA,YAAXC,KAAW;;AACpD,YAAIssB,UAAUlgB,OAAV,CAAkBrM,GAAlB,MAA2B,CAAC,CAA5B,IAAiCwsB,WAAWxsB,GAAX,CAArC,EAAsD;AAClDqI,mBAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYliB,MAAMoI,MAAN,EAAZ,CAAZ;;AAEA,oBAAQpI,MAAMoI,MAAN,GAAeV,IAAvB;AACA,qBAAKyL,gDAASA,CAAC4B,OAAf;AACI0X,+BAAW9pB,IAAX,CAAgB5C,GAAhB;AACA;AACJ;AACA,qBAAKoT,gDAASA,CAACC,SAAf;AACIM,iCAAa/Q,IAAb,CAAkB5C,GAAlB;AANJ;AAQH;AACJ,KAbD;AAcA;AACA,QAAI4sB,WAAW,CAAf;AACA3b,oFAAkBA,CAACmS,UAAU3X,WAA7B,EAA0C,UAAC3J,CAAD,EAAO;AAC7C,YAAIuW,OAAO,EAAX;AACA1E,qBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBiL,mBAAUA,IAAV,SAAkB0G,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAlB;AACH,
SAFD;AAGA,YAAI6qB,QAAQtU,IAAR,MAAkB1L,SAAtB,EAAiC;AAC7BggB,oBAAQtU,IAAR,IAAgBuU,QAAhB;AACAxkB,iBAAKxF,IAAL,CAAU,EAAV;AACA+Q,yBAAajR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBhF,qBAAKwkB,QAAL,EAAexf,CAAf,IAAoB2R,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAApB;AACH,aAFD;AAGA4qB,uBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAKwkB,QAAL,EAAexf,CAAf,IAAoB,CAAC2R,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAD,CAApB;AACH,aAFD;AAGA8qB,wBAAY,CAAZ;AACH,SAVD,MAUO;AACHF,uBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAKukB,QAAQtU,IAAR,CAAL,EAAoBjL,CAApB,EAAuBxK,IAAvB,CAA4Bmc,cAAc3R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAA5B;AACH,aAFD;AAGH;AACJ,KApBD;;AAsBA;AACA,QAAI+O,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAMsS,UAAUrS,YAAV,EAAN;AAAA,KAApB;AACA3I,SAAK1F,OAAL,CAAa,UAACN,GAAD,EAAS;AAClB,YAAM6mB,QAAQ7mB,GAAd;AACAsqB,mBAAWhqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtB6b,kBAAM7b,CAAN,IAAWof,WAAWpf,CAAX,EAAchL,IAAIgL,CAAJ,CAAd,EAAsB0D,aAAtB,EAAqCD,WAArC,CAAX;AACH,SAFD;AAGH,KALD;AAMA,QAAIyb,iBAAJ,EAAuB;AACnBA,0BAAkBxc,qBAAlB;AACAnC,uBAAe2e,iBAAf;AACH,KAHD,MAIK;AACD3e,uBAAe,IAAI/C,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,MAAM4qB,MAAR,EAA5B,CAAf;AACH;AACD,WAAO9e,YAAP;AACH;;;;;;;;;;;;;;AC1ID;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;ACbA;AAAA;AAAA;;;;;;;AAOA,SAASkf,SAAT,CAAoBlR,CAApB,EAAuBC,CAAvB,EAA0B;AACtB,QAAMkR,UAAQnR,CAAd;AACA,QAAMoR,UAAQnR,CAAd;AACA,QAAIkR,KAAKC,EAAT,EAAa;AACT,eAAO,CAAC,CAAR;AACH;AACD,QAAID,KAAKC,EAAT,EAAa;AACT,eAAO,CAAP;AACH;AACD,WAAO,CAAP;AACH;;AAED;;;;;;;;;;AAUA,SAASC,KAAT,CAAgB9jB,GAAhB,EAAqB+jB,EAArB,EAAyBC,GAAzB,EAA8BC,EAA9B,EAAkCC,MAAlC,EAA0C;AACtC,QAAMC,UAAUnkB,GAAhB;AAC
A,QAAMokB,SAAS,EAAf;AACA,SAAK,IAAIxrB,IAAImrB,EAAb,EAAiBnrB,KAAKqrB,EAAtB,EAA0BrrB,KAAK,CAA/B,EAAkC;AAC9BwrB,eAAOxrB,CAAP,IAAYurB,QAAQvrB,CAAR,CAAZ;AACH;AACD,QAAI6Z,IAAIsR,EAAR;AACA,QAAIrR,IAAIsR,MAAM,CAAd;;AAEA,SAAK,IAAIprB,KAAImrB,EAAb,EAAiBnrB,MAAKqrB,EAAtB,EAA0BrrB,MAAK,CAA/B,EAAkC;AAC9B,YAAI6Z,IAAIuR,GAAR,EAAa;AACTG,oBAAQvrB,EAAR,IAAawrB,OAAO1R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHD,MAGO,IAAIA,IAAIuR,EAAR,EAAY;AACfE,oBAAQvrB,EAAR,IAAawrB,OAAO3R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA,IAAIyR,OAAOE,OAAO3R,CAAP,CAAP,EAAkB2R,OAAO1R,CAAP,CAAlB,KAAgC,CAApC,EAAuC;AAC1CyR,oBAAQvrB,EAAR,IAAawrB,OAAO3R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA;AACH0R,oBAAQvrB,EAAR,IAAawrB,OAAO1R,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH;AACJ;AACJ;;AAED;;;;;;;;;;AAUA,SAASxQ,IAAT,CAAelC,GAAf,EAAoB+jB,EAApB,EAAwBE,EAAxB,EAA4BC,MAA5B,EAAoC;AAChC,QAAID,OAAOF,EAAX,EAAe;AAAE,eAAO/jB,GAAP;AAAa;;AAE9B,QAAMgkB,MAAMD,KAAK/Q,KAAKmL,KAAL,CAAW,CAAC8F,KAAKF,EAAN,IAAY,CAAvB,CAAjB;AACA7hB,SAAKlC,GAAL,EAAU+jB,EAAV,EAAcC,GAAd,EAAmBE,MAAnB;AACAhiB,SAAKlC,GAAL,EAAUgkB,MAAM,CAAhB,EAAmBC,EAAnB,EAAuBC,MAAvB;AACAJ,UAAM9jB,GAAN,EAAW+jB,EAAX,EAAeC,GAAf,EAAoBC,EAApB,EAAwBC,MAAxB;;AAEA,WAAOlkB,GAAP;AACH;;AAED;;;;;;;;;AASO,SAASqkB,SAAT,CAAoBrkB,GAApB,EAA6C;AAAA,QAApBkkB,MAAoB,uEAAXP,SAAW;;AAChD,QAAI3jB,IAAIlG,MAAJ,GAAa,CAAjB,EAAoB;AAChBoI,aAAKlC,GAAL,EAAU,CAAV,EAAaA,IAAIlG,MAAJ,GAAa,CAA1B,EAA6BoqB,MAA7B;AACH;AACD,WAAOlkB,GAAP;AACH,C;;;;;;;;;;;;AC1FD;AAAA;AAAA;AAAA;;AAEA;;;;;;;;;AASO,SAASskB,iBAAT,CAA4BtF,GAA5B,EAAiCC,GAAjC,EAAsC;AACzC,QAAMI,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA;AACA;AACA,QAAMif,kBAAkB7E,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAxB;;AAEA,WAAO,UAACa,SAAD,EAAYC,SAAZ,EAA0B;AAC7B,YAAIoE,cAAc,IAAlB;AACAD,wBAAgB/qB,OAAhB,CAAwB,UAACqM,SAAD,EAAe;AACnC,gBAAIsa,UAAUta,SAAV,EAAqB8Q,aAArB,KACAyJ,UAAUva,SAAV,EAAqB8Q,aADrB,IACsC6N,WAD1C,EACuD;AACnDA,8BAAc,IAAd;AACH,aAHD,MAGO;AACHA,8BAAc,KAAd;AACH;AACJ,SAPD;AAQA,eAAOA,WAAP;AACH,KAXD;AAYH,C;;;;;;;;;;;;AC9BD;AAAA;AAAA;AAAA;AAAA;AACA;;AAEO,SAASpX,WAAT,CAA
sBqX,UAAtB,EAAkCC,UAAlC,EAA8C;AACjD,WAAOxX,mEAAYA,CAACuX,UAAb,EAAyBC,UAAzB,EAAqCJ,uFAAiBA,CAACG,UAAlB,EAA8BC,UAA9B,CAArC,EAAgF,IAAhF,CAAP;AACH,C;;;;;;;;;;;;ACLD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;AAGO,SAASrX,aAAT,CAAwBoX,UAAxB,EAAoCC,UAApC,EAAgDnJ,QAAhD,EAA0D;AAC7D,WAAOrO,mEAAYA,CAACuX,UAAb,EAAyBC,UAAzB,EAAqCnJ,QAArC,EAA+C,KAA/C,EAAsDle,gDAAKA,CAACE,SAA5D,CAAP;AACH;;AAEM,SAAS+P,cAAT,CAAyBmX,UAAzB,EAAqCC,UAArC,EAAiDnJ,QAAjD,EAA2D;AAC9D,WAAOrO,mEAAYA,CAACwX,UAAb,EAAyBD,UAAzB,EAAqClJ,QAArC,EAA+C,KAA/C,EAAsDle,gDAAKA,CAACG,UAA5D,CAAP;AACH;;AAEM,SAAS+P,aAAT,CAAwBkX,UAAxB,EAAoCC,UAApC,EAAgDnJ,QAAhD,EAA0D;AAC7D,WAAO/N,oDAAKA,CAACH,cAAcoX,UAAd,EAA0BC,UAA1B,EAAsCnJ,QAAtC,CAAN,EAAuDjO,eAAemX,UAAf,EAA2BC,UAA3B,EAAuCnJ,QAAvC,CAAvD,CAAP;AACH,C;;;;;;;;;;;;ACfD;AAAA;AAAA;AAAA;;;;;;;AAOO,IAAMtO,oBAAoB,SAApBA,iBAAoB;AAAA,oCAAItL,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAM+c,GAAGzR,iBAAH,WAAwBtL,IAAxB,CAAN;AAAA,GAAb;AAAA,CAA1B;;AAEP;;;;;;;AAOO,IAAMO,OAAO,SAAPA,IAAO;AAAA,qCAAIP,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAM+c,GAAGxc,IAAH,WAAWP,IAAX,CAAN;AAAA,GAAb;AAAA,CAAb,C;;;;;;;;;;;;AChBP;AAAA;AAAA;;;;;;;AAOO,SAASoG,kBAAT,CAA6BrE,UAA7B,EAAyC6F,QAAzC,EAAmD;AACtD,QAAI7F,WAAW5J,MAAX,GAAoB,CAAxB,EAA2B;AACvB,YAAM6qB,aAAajhB,WAAWG,KAAX,CAAiB,GAAjB,CAAnB;AACA8gB,mBAAWnrB,OAAX,CAAmB,UAACorB,OAAD,EAAa;AAC5B,gBAAMC,aAAaD,QAAQ/gB,KAAR,CAAc,GAAd,CAAnB;AACA,gBAAME,QAAQ,CAAE8gB,WAAW,CAAX,CAAhB;AACA,gBAAM7gB,MAAM,EAAE6gB,WAAW,CAAX,KAAiBA,WAAW,CAAX,CAAnB,CAAZ;AACA,gBAAI7gB,OAAOD,KAAX,EAAkB;AACd,qBAAK,IAAInL,IAAImL,KAAb,EAAoBnL,KAAKoL,GAAzB,EAA8BpL,KAAK,CAAnC,EAAsC;AAClC2Q,6BAAS3Q,CAAT;AACH;AACJ;AACJ,SATD;AAUH;AACJ,C;;;;;;;;;;;;;;;;;;;;;;ACrBD;AACA;AACA;AACA;;AAEA;;;;;;;;AAQA,SAASksB,SAAT,CAAoBC,QAApB,EAA8BC,QAA9B,EAAwC;AACpC,QAAIC,gBAAJ;;AAEA,YAAQF,QAAR;AACA,aAAKrY,qDAAcA,CAACC,UAApB;AACA,aAAKtC,uDAAgBA,CAACwB,QAAtB;AACI,gBAAImZ,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ;AAAA,2BAAUD,IAAIC,CAAd;AAAA,iBAAV;AACH,aAFD,MAEO;AACHuS,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ;AAAA,2BAAUA,IAAID,CAAd;AAAA
,iBAAV;AACH;AACD;AACJ;AACI,gBAAIuS,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AACH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAR,GAAY,CAAC,CAApB;AACH,iBAPD;AAQH,aATD,MASO;AACHuS,0BAAU,iBAACxS,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AACH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAC,CAAT,GAAa,CAApB;AACH,iBAPD;AAQH;AA5BL;;AA+BA,WAAOuS,OAAP;AACH;;AAED;;;;;;;AAOA,SAASC,mBAAT,CAA8BC,QAA9B,EAAwCC,YAAxC,EAAsD;AAClD,QAAMC,YAAY/T,OAAO8T,YAAP,EAAqBE,WAArB,OAAuC,MAAvC,GAAgD,MAAhD,GAAyD,KAA3E;AACA,WAAOR,UAAUK,SAAS1mB,IAAnB,EAAyB4mB,SAAzB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,SAAT,CAAoBrmB,IAApB,EAA0BoH,UAA1B,EAAsC;AAClC,QAAMmd,UAAU,IAAIzlB,GAAJ,EAAhB;AACA,QAAMwnB,cAAc,EAApB;;AAEAtmB,SAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAW;AACpB,YAAMkiB,WAAWliB,MAAM+C,UAAN,CAAjB;AACA,YAAImd,QAAQ7kB,GAAR,CAAY6mB,QAAZ,CAAJ,EAA2B;AACvBD,wBAAY/B,QAAQ5kB,GAAR,CAAY4mB,QAAZ,CAAZ,EAAmC,CAAnC,EAAsC/rB,IAAtC,CAA2C6J,KAA3C;AACH,SAFD,MAEO;AACHiiB,wBAAY9rB,IAAZ,CAAiB,CAAC+rB,QAAD,EAAW,CAACliB,KAAD,CAAX,CAAjB;AACAkgB,oBAAQllB,GAAR,CAAYknB,QAAZ,EAAsBD,YAAY1rB,MAAZ,GAAqB,CAA3C;AACH;AACJ,KARD;;AAUA,WAAO0rB,WAAP;AACH;;AAED;;;;;;;;;AASA,SAASE,kBAAT,CAA6BC,YAA7B,EAA2CC,YAA3C,EAAyDC,kBAAzD,EAA6E;AACzE,QAAMC,MAAM;AACRC,eAAOJ,aAAa,CAAb;AADC,KAAZ;;AAIAC,iBAAajlB,MAAb,CAAoB,UAACC,GAAD,EAAMqC,IAAN,EAAYC,GAAZ,EAAoB;AACpCtC,YAAIqC,IAAJ,IAAY0iB,aAAa,CAAb,EAAgBjtB,GAAhB,CAAoB;AAAA,mBAAS6K,MAAMsiB,mBAAmB3iB,GAAnB,EAAwBqE,KAA9B,CAAT;AAAA,SAApB,CAAZ;AACA,eAAO3G,GAAP;AACH,KAHD,EAGGklB,GAHH;;AAKA,WAAOA,GAAP;AACH;;AAED;;;;;;;AAOA,SAASE,iBAAT,CAA4B9mB,IAA5B,EAAkCC,MAAlC,EAA0C4F,cAA1C,EAA0D;AACtD,QAAIc,kBAAJ;AACA,QAAIogB,iBAAJ;AACA,QAAId,iBAAJ;AACA,QAAIvsB,IAAImM,eAAejL,MAAf,GAAwB,CAAhC;;AAEA,WAAOlB,KAAK,CAAZ,EAAeA,GAAf,EAAoB;AAChBiN,oBAAYd,eAAenM,CAAf,EAAkB,CAAlB,CAAZ;AACAqtB,mBAAWlhB,eAAenM,CAAf,EAAkB,CAAlB,CAAX;AACAusB,mBAAWpL,6DAAaA,CAAC5a,MAAd,EAAsB0G,SAAtB,CAAX;;AAEA,YAAI,CAACsf,QAAL,EAAe;AACX;AACA;AACH;;AA
ED,YAAIe,yDAAUA,CAACD,QAAX,CAAJ,EAA0B;AACtB;AACA5B,yEAASA,CAACnlB,IAAV,EAAgB,UAACuT,CAAD,EAAIC,CAAJ;AAAA,uBAAUuT,SAASxT,EAAE0S,SAAS5d,KAAX,CAAT,EAA4BmL,EAAEyS,SAAS5d,KAAX,CAA5B,CAAV;AAAA,aAAhB;AACH,SAHD,MAGO,IAAItH,sDAAOA,CAACgmB,QAAR,CAAJ,EAAuB;AAAA;AAC1B,oBAAMT,cAAcD,UAAUrmB,IAAV,EAAgBimB,SAAS5d,KAAzB,CAApB;AACA,oBAAM4e,YAAYF,SAASA,SAASnsB,MAAT,GAAkB,CAA3B,CAAlB;AACA,oBAAM8rB,eAAeK,SAASlqB,KAAT,CAAe,CAAf,EAAkBkqB,SAASnsB,MAAT,GAAkB,CAApC,CAArB;AACA,oBAAM+rB,qBAAqBD,aAAaltB,GAAb,CAAiB;AAAA,2BAAKqhB,6DAAaA,CAAC5a,MAAd,EAAsBlG,CAAtB,CAAL;AAAA,iBAAjB,CAA3B;;AAEAusB,4BAAYhsB,OAAZ,CAAoB,UAACmsB,YAAD,EAAkB;AAClCA,iCAAajsB,IAAb,CAAkBgsB,mBAAmBC,YAAnB,EAAiCC,YAAjC,EAA+CC,kBAA/C,CAAlB;AACH,iBAFD;;AAIAxB,6EAASA,CAACmB,WAAV,EAAuB,UAAC/S,CAAD,EAAIC,CAAJ,EAAU;AAC7B,wBAAM0T,IAAI3T,EAAE,CAAF,CAAV;AACA,wBAAMjX,IAAIkX,EAAE,CAAF,CAAV;AACA,2BAAOyT,UAAUC,CAAV,EAAa5qB,CAAb,CAAP;AACH,iBAJD;;AAMA;AACA0D,qBAAKpF,MAAL,GAAc,CAAd;AACA0rB,4BAAYhsB,OAAZ,CAAoB,UAAC+J,KAAD,EAAW;AAC3BrE,yBAAKxF,IAAL,gCAAa6J,MAAM,CAAN,CAAb;AACH,iBAFD;AAlB0B;AAqB7B,SArBM,MAqBA;AAAA;AACH,oBAAM2gB,SAASgB,oBAAoBC,QAApB,EAA8Bc,QAA9B,CAAf;AACA;AACA5B,6EAASA,CAACnlB,IAAV,EAAgB,UAACuT,CAAD,EAAIC,CAAJ;AAAA,2BAAUwR,OAAOzR,EAAE0S,SAAS5d,KAAX,CAAP,EAA0BmL,EAAEyS,SAAS5d,KAAX,CAA1B,CAAV;AAAA,iBAAhB;AAHG;AAIN;AACJ;AACJ;;AAED;;;;;;;;;AASA,IAAM8e,sBAAsB,SAAtBA,mBAAsB,CAACC,UAAD,EAAapnB,IAAb,EAAmBC,MAAnB,EAA2B4F,cAA3B,EAA8C;AACtE,QAAIuhB,WAAWxsB,MAAX,KAAsB,CAA1B,EAA6B;AAAE,eAAOoF,IAAP;AAAc;;AAE7C,QAAMqnB,YAAYD,WAAW,CAAX,CAAlB;AACA,QAAM5tB,MAAM,IAAIsF,GAAJ,EAAZ;;AAEAkB,SAAKyB,MAAL,CAAY,UAACC,GAAD,EAAM4lB,OAAN,EAAkB;AAC1B,YAAMC,OAAOD,QAAQD,UAAUhf,KAAlB,CAAb;AACA,YAAI3G,IAAIhC,GAAJ,CAAQ6nB,IAAR,CAAJ,EAAmB;AACf7lB,gBAAI/B,GAAJ,CAAQ4nB,IAAR,EAAc/sB,IAAd,CAAmB8sB,OAAnB;AACH,SAFD,MAEO;AACH5lB,gBAAIrC,GAAJ,CAAQkoB,IAAR,EAAc,CAACD,OAAD,CAAd;AACH;AACD,eAAO5lB,GAAP;AACH,KARD,EAQGlI,GARH;;AANsE;AAAA;AAAA;;AAAA;AAgBtE,6BAAuBA,GAAvB,8HAA4B;AAAA;;AAAA;;AAAA,gBAAlB5B,GAAkB;AAAA,gBAAbsa,GAAa;;AACxB,gBAAMsV,OAAOL,oBAAoBC,WAAWvqB,KAAX,CAAiB,CAAjB,CAA
pB,EAAyCqV,GAAzC,EAA8CjS,MAA9C,EAAsD4F,cAAtD,CAAb;AACArM,gBAAI6F,GAAJ,CAAQzH,GAAR,EAAa4vB,IAAb;AACA,gBAAI3sB,MAAMkG,OAAN,CAAcymB,IAAd,CAAJ,EAAyB;AACrBV,kCAAkBU,IAAlB,EAAwBvnB,MAAxB,EAAgC4F,cAAhC;AACH;AACJ;AAtBqE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAwBtE,WAAOrM,GAAP;AACH,CAzBD;;AA2BA;;;;;;;;;AASA,SAASiuB,cAAT,CAAyBznB,IAAzB,EAA+BC,MAA/B,EAAuC4F,cAAvC,EAAuDuhB,UAAvD,EAAmE;AAC/DvhB,qBAAiBA,eAAeyN,MAAf,CAAsB,UAACoU,MAAD,EAAY;AAC/C,YAAIA,OAAO,CAAP,MAAc,IAAlB,EAAwB;AACpBN,uBAAW5sB,IAAX,CAAgBktB,OAAO,CAAP,CAAhB;AACA,mBAAO,KAAP;AACH;AACD,eAAO,IAAP;AACH,KANgB,CAAjB;AAOA,QAAI7hB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE,eAAOoF,IAAP;AAAc;;AAEjDonB,iBAAaA,WAAW5tB,GAAX,CAAe;AAAA,eAAKqhB,6DAAaA,CAAC5a,MAAd,EAAsBrD,CAAtB,CAAL;AAAA,KAAf,CAAb;;AAEA,QAAM+qB,iBAAiBR,oBAAoBC,UAApB,EAAgCpnB,IAAhC,EAAsCC,MAAtC,EAA8C4F,cAA9C,CAAvB;AACA,WAAO7F,KAAKxG,GAAL,CAAS,UAACQ,GAAD,EAAS;AACrB,YAAIN,IAAI,CAAR;AACA,YAAIkuB,UAAUD,cAAd;;AAEA,eAAO,CAAC9sB,MAAMkG,OAAN,CAAc6mB,OAAd,CAAR,EAAgC;AAC5BA,sBAAUA,QAAQjoB,GAAR,CAAY3F,IAAIotB,WAAW1tB,GAAX,EAAgB2O,KAApB,CAAZ,CAAV;AACH;;AAED,eAAOuf,QAAQC,KAAR,EAAP;AACH,KATM,CAAP;AAUH;;AAED;;;;;;AAMO,SAASjG,QAAT,CAAmBkG,OAAnB,EAA4BjiB,cAA5B,EAA4C;AAAA,QACzC5F,MADyC,GACxB6nB,OADwB,CACzC7nB,MADyC;AAAA,QACjCD,IADiC,GACxB8nB,OADwB,CACjC9nB,IADiC;;;AAG/C6F,qBAAiBA,eAAeyN,MAAf,CAAsB;AAAA,eAAW,CAAC,CAACuH,6DAAaA,CAAC5a,MAAd,EAAsB8nB,QAAQ,CAAR,CAAtB,CAAb;AAAA,KAAtB,CAAjB;AACA,QAAIliB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE;AAAS;;AAE5C,QAAIotB,kBAAkBniB,eAAewB,SAAf,CAAyB;AAAA,eAAW0gB,QAAQ,CAAR,MAAe,IAA1B;AAAA,KAAzB,CAAtB;AACAC,sBAAkBA,oBAAoB,CAAC,CAArB,GAAyBA,eAAzB,GAA2CniB,eAAejL,MAA5E;;AAEA,QAAMqtB,yBAAyBpiB,eAAehJ,KAAf,CAAqB,CAArB,EAAwBmrB,eAAxB,CAA/B;AACA,QAAME,sBAAsBriB,eAAehJ,KAAf,CAAqBmrB,eAArB,CAA5B;;AAEAlB,sBAAkB9mB,IAAlB,EAAwBC,MAAxB,EAAgCgoB,sBAAhC;AACAjoB,WAAOynB,eAAeznB,IAAf,EAAqBC,MAArB,EAA6BioB,mBAA7B,EAAkDD,uBAAuBzuB,GAAvB,CAA2B;AAAA,eAAUkuB,OAAO,CAAP,CAAV;AAAA,KAA3B,CAAlD,CAAP;;AAEAI,YAAQpkB,IAAR,GAAe1D,KAAKxG,GAAL,CAAS;AAAA,eAAOQ,IAAImuB,GAA
J,EAAP;AAAA,KAAT,CAAf;AACAL,YAAQ9nB,IAAR,GAAeA,IAAf;AACH,C;;;;;;;;;;;;AChQD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;;;;;;;AAOO,SAASsO,KAAT,CAAgBwR,GAAhB,EAAqBC,GAArB,EAA0B;AAC7B,QAAM+B,YAAY,EAAlB;AACA,QAAM7hB,SAAS,EAAf;AACA,QAAM8hB,gBAAgB,EAAtB;AACA,QAAM/hB,OAAO,EAAb;AACA,QAAMmgB,gBAAgBL,IAAI1Z,aAAJ,EAAtB;AACA,QAAMga,gBAAgBL,IAAI3Z,aAAJ,EAAtB;AACA,QAAM4b,wBAAwB7B,cAAclZ,SAAd,EAA9B;AACA,QAAMgb,wBAAwB7B,cAAcnZ,SAAd,EAA9B;AACA,QAAMxN,OAAU0mB,cAAc1mB,IAAxB,eAAsC2mB,cAAc3mB,IAA1D;;AAEA;AACA,QAAI,CAACyoB,gEAAUA,CAACpC,IAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiD+c,IAAIxc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACC8c,QAAIvc,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQmgB,sBAAsBrb,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAYuf,sDAAOA,CAAC,EAAR,EAAYlY,MAAM5B,MAAN,EAAZ,CAAZ;AACA8hB,sBAAcvnB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;AAMA,aAAS0oB,iBAAT,CAA4B3C,EAA5B,EAAgCvY,SAAhC,EAA2C;AACvC4B,wFAAkBA,CAAC2W,GAAGnc,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAMmnB,QAAQ,EAAd;AACA,gBAAIwB,WAAW,EAAf;AACAN,0BAAcznB,OAAd,CAAsB,UAACgoB,UAAD,EAAgB;AAClC,oBAAMzqB,QAAQoP,UAAUqb,UAAV,EAAsBtb,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACA2oB,kCAAgBxqB,KAAhB;AACAgpB,sBAAMyB,UAAN,IAAoBzqB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACiqB,UAAUO,QAAV,CAAL,EAA0B;AACtBriB,qBAAKxF,IAAL,CAAUqmB,KAAV;AACAiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB;AACAG,sBAAkBpC,GAAlB,EAAuBkC,qBAAvB;;AAEA,WAAO,IAAIzf,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;AC7DD;AACA;AACA;AAOA;;AAEA;;;;;;;;;;;;IAWMgT,Q;;AAEF;;;;;;;;;;AAUA,wBAAwB;AAAA;;AACpB,YAAI2b,eAAJ;;AAEA,aAAKrM,OAAL,GAAe,IAAf;AACA,aAAK7G,WAAL,GAAmB,EAAnB;AACA,aAAKO,mBAAL,GAA2B,EAA3B;AACA,aAAKoG,SAAL,GAAiB,EAAjB;;AANoB,0CAARvW,MAAQ;AAARA,kBAAQ;AAAA;;AAQpB,YAAIA,OAAO1K,MAAP,KAAkB,CAAlB,IAAwB,CAACwtB,SAAS9iB,OAAO,CAAP,CAAV,aAAgCmH,QAA5D,EAAuE;AACnE;AACA,iBAAKlJ,cAAL,G
AAsB6kB,OAAO7kB,cAA7B;AACA,iBAAKF,WAAL,GAAmB+kB,OAAO/kB,WAA1B;AACA,iBAAK8C,WAAL,GAAmBiiB,OAAOjiB,WAA1B;AACA,iBAAK4V,OAAL,GAAeqM,MAAf;AACA,iBAAKvhB,kBAAL,GAA0B,KAAKkV,OAAL,CAAalV,kBAAvC;AACA,iBAAKwhB,eAAL,GAAuB9Y,0DAAWA,EAAlC;AACA,iBAAK7H,qBAAL,GAA6BC,qBAA7B;AACH,SATD,MASO;AACH2S,8DAAUA,mBAAC,IAAX,SAAoBhV,MAApB;AACA,iBAAK+iB,eAAL,GAAuB,KAAKxhB,kBAAL,CAAwBpN,IAA/C;AACA,iBAAKiO,qBAAL,GAA6BC,qBAA7B;AACA,iBAAKgC,qBAAL,GAA6B;AACzB+S,gCAAgB,EADS;AAEzBY,kCAAkB;AAFO,aAA7B;AAIH;AACJ;;AAED;;;;;;;;;;;;;;;;;;;;;;;;oCAoBa;AACT,mBAAO,KAAKlX,aAAL,GAAqBxE,MAArB,CAA4BpI,GAA5B,CAAgC;AAAA,uBAAK8J,EAAErD,MAAF,EAAL;AAAA,aAAhC,CAAP;AACH;;AAED;;;;;;;;;;;kCAQU;AACN,mBAAO,KAAKooB,eAAZ;AACH;;;wCAEgB;AACb,mBAAO,KAAKC,WAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,WAAL,GAAmB9T,4DAAYA,CAAC,CAAC,KAAKnR,WAAN,EAAmB,KAAKE,cAAxB,CAAb,EACd,KAAKN,oBAAL,EADc,EACe,KAAKolB,eADpB,CAAnB;AAEA,mBAAO,IAAP;AACH;;;+CAEuB;AACpB,mBAAO,KAAKxhB,kBAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BA8BM0hB,Q,EAAUlM,Q,EAAU;AACtB,mBAAOrO,8DAAYA,CAAC,IAAb,EAAmBua,QAAnB,EAA6BlM,QAA7B,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;oCAoBakM,Q,EAAU;AACnB,mBAAOva,8DAAYA,CAAC,IAAb,EAAmBua,QAAnB,EAA6BnD,mEAAiBA,CAAC,IAAlB,EAAwBmD,QAAxB,CAA7B,EAAgE,IAAhE,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;8BAkBOC,S,EAAW;AACd,mBAAOla,wDAAM,IAAN,EAAYka,SAAZ,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;mCAiBYC,c,EAAgB;AACxB,mBAAOxa,6DAAW,IAAX,EAAiBwa,cAAjB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;+BA+CQxR,Q,EAAU9R,M,EAAQ;AACtB,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACAA,mBAAOuG,IAAP,GAAcvG,OAAOuG,IAAP,IAAeD,UAAUC,IAAvC;;AAEA,gBAAM4N,cAAc,EAAElU,WAAWD,OAAOC,SAApB,EAApB;AACA,mBAAOiU,+DAAeA,CAClB,IADG,EAEHpC,QAFG,EAGH9R,MAHG,EAIHmU,WAJG,CAAP;AAMH;;AAED;;;;;;;;;;;;;;;;;;;;;kCAkBW;AACP,mBAAO,CAAC,KAAKjW,WAAL,CAAiBzI,MAAlB,IAA4B,CAAC,KAAK2I,cAAL,CAAoB3I,MAAxD;AACH;;AAED;;;;;;;;;;gCAOyB;AAAA,gBAAlBwK,SAAkB,uEAAN,IAAM;;AACrB,gBAAM4R,WAAW,IAAI,KAAK9Q,WAAT,CAAqB,IAArB,CAA
jB;AACA,gBAAId,SAAJ,EAAe;AACX4R,yBAASpR,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHoR,yBAASpR,SAAT,CAAmB,IAAnB;AACH;AACD,mBAAOoR,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCA2CS0C,S,EAAWvU,M,EAAQ;AACxB,gBAAMsG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdxG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBqK,SAAlB,EAA6BtG,MAA7B,CAAT;AACA,gBAAM6G,cAAc,KAAKhE,eAAL,EAApB;AACA,gBAAMiE,YAAY7R,OAAOmI,IAAP,CAAYyJ,WAAZ,CAAlB;AAPwB,0BAQP7G,MARO;AAAA,gBAQhBuG,IARgB,WAQhBA,IARgB;;AASxB,gBAAMgS,sBAAsBtR,sEAAsBA,CAACsN,SAAvB,EAAkCzN,SAAlC,EAA6CD,WAA7C,CAA5B;;AAEA,gBAAIgP,kBAAJ;;AAEA,gBAAItP,SAASC,oDAAaA,CAACmB,GAA3B,EAAgC;AAC5B,oBAAI4b,kBAAkBjP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4C;AAC9DhS,0BAAMC,oDAAaA,CAACC,MAD0C;AAE9DxG,+BAAWD,OAAOC;AAF4C,iBAA5C,EAGnB6G,SAHmB,CAAtB;AAIA,oBAAI0c,iBAAiBlP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4C;AAC7DhS,0BAAMC,oDAAaA,CAACkB,OADyC;AAE7DzH,+BAAWD,OAAOC;AAF2C,iBAA5C,EAGlB6G,SAHkB,CAArB;AAIA+O,4BAAY,CAAC0N,eAAD,EAAkBC,cAAlB,CAAZ;AACH,aAVD,MAUO;AACH,oBAAID,mBAAkBjP,gEAAgBA,CAAC,IAAjB,EAAuBiE,mBAAvB,EAA4CvY,MAA5C,EAAoD8G,SAApD,CAAtB;AACA+O,4BAAY0N,gBAAZ;AACH;;AAED,mBAAO1N,SAAP;AACH;;;0CAEkB;AACf,mBAAO,KAAK4N,YAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,YAAL,GAAoB,KAAKN,WAAL,CAAiB1mB,MAAjB,CAAwBH,MAAxB,CAA+B,UAACC,GAAD,EAAMmnB,QAAN,EAAgBnvB,CAAhB,EAAsB;AACrEgI,oBAAImnB,SAASpvB,IAAT,EAAJ,IAAuB;AACnB4O,2BAAO3O,CADY;AAEnBmf,yBAAKgQ,SAAS5oB,MAAT;AAFc,iBAAvB;AAIA,uBAAOyB,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;AAOA,mBAAO,IAAP;AACH;;AAGD;;;;;;;;;kCAMW;AACP,iBAAKqa,OAAL,IAAgB,KAAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAe,IAAf;AACA,iBAAKF,SAAL,CAAevhB,OAAf,CAAuB,UAACwhB,KAAD,EAAW;AAC9BA,sBAAMC,OAAN,GAAgB,IAAhB;AACH,aAFD;AAGA,iBAAKF,SAAL,GAAiB,EAAjB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;oCA0BaC,K,EAAO;AAChB,gBAAI9X,MAAM,KAAK6X,SAAL,CAAexU,SAAf,CAAyB;AAAA,uBAAW0hB,YAAYjN,KAAvB;AAAA,aAAzB,CAAV;AACA9X,oBAAQ,CAAC,CAAT,GAAa,KAAK6X,SAAL,CAAeta,MAAf,CAAsByC,GAAtB,EAA2B,CAA3B,CAAb,GAA6C,IAA7C;AACH;;AAED;;;;;;;;kCAKWglB,M,EAAQ;AACf,iBAAKjN,OAAL,IAAgB,K
AAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAeiN,MAAf;AACAA,sBAAUA,OAAOnN,SAAP,CAAiBrhB,IAAjB,CAAsB,IAAtB,CAAV;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;oCAwBa;AACT,mBAAO,KAAKuhB,OAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;sCAyBe;AACX,mBAAO,KAAKF,SAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;yCAwBkB;AACd,mBAAO,KAAK3G,WAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;iDAwB0B;AACtB,mBAAO,KAAKO,mBAAZ;AACH;;;;;;AAGUhJ,uEAAf,E;;;;;;;;;;;;ACjkBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;IAEemW,G,GAAoDc,kE,CAApDd,G;IAAKI,G,GAA+CU,kE,CAA/CV,G;IAAKzS,G,GAA0CmT,kE,CAA1CnT,G;IAAKE,G,GAAqCiT,kE,CAArCjT,G;IAAKyS,K,GAAgCQ,kE,CAAhCR,K;IAAOC,I,GAAyBO,kE,CAAzBP,I;IAAMC,K,GAAmBM,kE,CAAnBN,K;IAAY6F,E,GAAOvF,kE,CAAZF,G;;;;;;;;;;;;;ACFvD;AAAA;;;;;;AAMe,yEAAC3kB,KAAD,EAAW;AACtB,QAAInF,IAAI,CAAR;AACA,WAAO,YAAe;AAAA,0CAAXkI,MAAW;AAAXA,kBAAW;AAAA;;AAClBA,eAAOtH,OAAP,CAAe,UAAC4X,GAAD,EAAM9K,UAAN,EAAqB;AAChC,gBAAI,EAAEvI,MAAMuI,UAAN,aAA6BvM,KAA/B,CAAJ,EAA2C;AACvCgE,sBAAMuI,UAAN,IAAoBvM,MAAM+iB,IAAN,CAAW,EAAEhjB,QAAQlB,CAAV,EAAX,CAApB;AACH;AACDmF,kBAAMuI,UAAN,EAAkB5M,IAAlB,CAAuB0X,GAAvB;AACH,SALD;AAMAxY;AACH,KARD;AASH,CAXD,E;;;;;;;;;;;;;;;;ACNA;;;;;;AAMA,SAASwvB,mBAAT,CAA8BjuB,IAA9B,EAAoC;AAChC,QAAIA,gBAAgB9C,IAApB,EAA0B;AACtB,eAAO8C,IAAP;AACH;;AAED,WAAO,IAAI9C,IAAJ,CAAS8C,IAAT,CAAP;AACH;AACD;;;;;;;AAOA,SAASR,GAAT,CAAc6B,CAAd,EAAiB;AACb,WAAQA,IAAI,EAAL,SAAgBA,CAAhB,GAAuBA,CAA9B;AACH;AACD;;;;;;;;;AASA;;;;;;;AAOAP,OAAOotB,MAAP,GAAgB,UAAUjtB,IAAV,EAAgB;AAC5B,WAAOA,KAAKY,OAAL,CAAa,0BAAb,EAAyC,MAAzC,CAAP;AACH,CAFD;;AAIA;;;;;;;;AAQA,0BAA2B,SAAS4R,iBAAT,CAA4B/V,MAA5B,EAAoC;AAC3D,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKywB,QAAL,GAAgB7kB,SAAhB;AACA,SAAKyO,UAAL,GAAkBzO,SAAlB;AACH;;AAED;AACAmK,kBAAkB2a,YAAlB,GAAiC,GAAjC;;AAEA;AACA;AACA3a,kBAAkB4a,uBAAlB,GAA4C;AACxCC,UAAM,CADkC;AAExCC,WAAO,CAFiC;AAGxCC,SAAK,CAHmC;AAIxCC,UAAM,CAJkC;AAKxCC,YAAQ,CALgC;AAMxCC,YAAQ,CANgC;AAOxCC,iBAAa;AAP2B,CAA5C;;AAUA;;;;;;;AAOAnb,kBAAkBob,mBAAlB,GAAwC,UAAUC,MAAV,EAAkB;AACtD,WAAO,UAAU7X,GAAV,EAAe;AAClB,YAAIW,kBAAJ;AACA,YAAIkB,SA
ASlB,YAAYmX,SAAS9X,GAAT,EAAc,EAAd,CAArB,CAAJ,EAA6C;AACzC,mBAAOW,SAAP;AACH;;AAED,eAAOkX,MAAP;AACH,KAPD;AAQH,CATD;;AAWA;;;;;;;;AAQArb,kBAAkBub,kBAAlB,GAAuC,UAAU3R,KAAV,EAAiByR,MAAjB,EAAyB;AAC5D,WAAO,UAAC7X,GAAD,EAAS;AACZ,YAAIxY,UAAJ;AACA,YAAIwwB,UAAJ;;AAEA,YAAI,CAAChY,GAAL,EAAU;AAAE,mBAAO6X,MAAP;AAAgB;;AAE5B,YAAMI,OAAOjY,IAAIkU,WAAJ,EAAb;;AAEA,aAAK1sB,IAAI,CAAJ,EAAOwwB,IAAI5R,MAAM1d,MAAtB,EAA8BlB,IAAIwwB,CAAlC,EAAqCxwB,GAArC,EAA0C;AACtC,gBAAI4e,MAAM5e,CAAN,EAAS0sB,WAAT,OAA2B+D,IAA/B,EAAqC;AACjC,uBAAOzwB,CAAP;AACH;AACJ;;AAED,YAAIA,MAAM6K,SAAV,EAAqB;AACjB,mBAAOwlB,MAAP;AACH;AACD,eAAO,IAAP;AACH,KAlBD;AAmBH,CApBD;;AAsBA;;;;;;;;;;;;;;;;;AAiBArb,kBAAkB0b,mBAAlB,GAAwC,YAAY;AAChD,QAAMC,UAAU;AACZC,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,CADK;AAUZC,cAAM,CACF,QADE,EAEF,QAFE,EAGF,SAHE,EAIF,WAJE,EAKF,UALE,EAMF,QANE,EAOF,UAPE;AAVM,KAAhB;AAoBA,QAAMC,YAAY;AACdF,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,EAQH,KARG,EASH,KATG,EAUH,KAVG,EAWH,KAXG,EAYH,KAZG,CADO;AAedC,cAAM,CACF,SADE,EAEF,UAFE,EAGF,OAHE,EAIF,OAJE,EAKF,KALE,EAMF,MANE,EAOF,MAPE,EAQF,QARE,EASF,WATE,EAUF,SAVE,EAWF,UAXE,EAYF,UAZE;AAfQ,KAAlB;;AA+BA,QAAME,cAAc;AAChBC,WAAG;AACC;AACAjxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;;AAEA,uBAAO5O,EAAEsnB,QAAF,GAAaC,QAAb,EAAP;AACH;AAVF,SADa;AAahBX,WAAG;AACC;AACAzwB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,KAAe,EAA7B;;AAEA,uBAAO,CAAC1vB,UAAU,CAAV,GAAc,EAAd,GAAmBA,KAApB,EAA2B2vB,QAA3B,EAAP;AACH;AAXF,SAba;AA0BhBC,WAAG;AACC;AACArxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKC/Y,oBAAQ,gBAACM,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIkU,WAAJ,EAAP;AACH;AACD,uBAAO,IA
AP;AACH,aAVF;AAWCvjB,uBAAW,mBAACqP,GAAD,EAAS;AAChB,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,EAAd;;AAEA,uBAAQ1vB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA1Ba;AA4ChB6vB,WAAG;AACC;AACAtxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKC/Y,oBAAQ,gBAACM,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIkU,WAAJ,EAAP;AACH;AACD,uBAAO,IAAP;AACH,aAVF;AAWCvjB,uBAAW,mBAACqP,GAAD,EAAS;AAChB,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMhX,QAAQoI,EAAEsnB,QAAF,EAAd;;AAEA,uBAAQ1vB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA5Ca;AA8DhB8vB,WAAG;AACC;AACAvxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM+Y,OAAO3nB,EAAE4nB,UAAF,EAAb;;AAEA,uBAAOzwB,IAAIwwB,IAAJ,CAAP;AACH;AAXF,SA9Da;AA2EhBE,WAAG;AACC;AACA1xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM5W,UAAUgI,EAAE8nB,UAAF,EAAhB;;AAEA,uBAAO3wB,IAAIa,OAAJ,CAAP;AACH;AAXF,SA3Ea;AAwFhB+vB,WAAG;AACC;AACA5xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMoZ,KAAKhoB,EAAEioB,eAAF,EAAX;;AAEA,uBAAOD,GAAGT,QAAH,EAAP;AACH;AAXF,SAxFa;AAqGhBtX,WAAG;AACC;AACA9Z,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQC,KAAR,CAAczwB,IAAd,CAAmB,GAAnB,CAAX;AAAwC,aAJtD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCI,QAAQC,KAA7C,CALT;AAMCznB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEmoB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQC,KAAR,CAAckB,GAAd,CAAD,CAAqBX,QAArB,EAAP;AACH;AAXF,SArGa;AAkHhBa,WAAG;AACC;AACAjyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQE,IAAR,CAAa1wB,IAAb,CAAkB,GAAlB,CAAX;
AAAuC,aAJrD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCI,QAAQE,IAA7C,CALT;AAMC1nB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEmoB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQE,IAAR,CAAaiB,GAAb,CAAD,CAAoBX,QAApB,EAAP;AACH;AAXF,SAlHa;AA+HhBjnB,WAAG;AACC;AACAnK,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEqoB,OAAF,EAAZ;;AAEA,uBAAOH,IAAIX,QAAJ,EAAP;AACH;AAXF,SA/Ha;AA4IhBvnB,WAAG;AACC;AACA7J,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMsZ,MAAMloB,EAAEqoB,OAAF,EAAZ;;AAEA,uBAAOlxB,IAAI+wB,GAAJ,CAAP;AACH;AAXF,SA5Ia;AAyJhBhY,WAAG;AACC;AACA/Z,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUF,KAAV,CAAgBzwB,IAAhB,CAAqB,GAArB,CAAX;AAA0C,aAJxD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCO,UAAUF,KAA/C,CALT;AAMCznB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUF,KAAV,CAAgBsB,KAAhB,CAAD,CAAyBf,QAAzB,EAAP;AACH;AAXF,SAzJa;AAsKhBiB,WAAG;AACC;AACAryB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUD,IAAV,CAAe1wB,IAAf,CAAoB,GAApB,CAAX;AAAyC,aAJvD;;AAKC+X,oBAAQlD,kBAAkBub,kBAAlB,CAAqCO,UAAUD,IAA/C,CALT;AAMC1nB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUD,IAAV,CAAeqB,KAAf,CAAD,CAAwBf,QAAxB,EAAP;AACH;AAXF,SAtKa;AAmLhB3D,WAAG;AACC;AACAztB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;AAKC/Y,kBALD,kBAKSM,GALT,EAKc;AAAE,uBAAOxD,kBAAkBob,mBAAlB,GAAwC5X,GAAxC,IAA+C,CAAtD;AAA0D,aAL1E;AAMCrP,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAM0Z,QAAQtoB,EAAEuoB,QAAF,EAAd;;AAEA,uBAAOpxB,IAAImxB,QAAQ,CAAZ,CAAP;AACH;AAXF,SAnLa
;AAgMhBG,WAAG;AACC;AACAtyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;AAKC/Y,kBALD,kBAKSM,GALT,EAKc;AACT,oBAAIG,eAAJ;AACA,oBAAIH,GAAJ,EAAS;AACL,wBAAMgY,IAAIhY,IAAItX,MAAd;AACAsX,0BAAMA,IAAI8Z,SAAJ,CAAc9B,IAAI,CAAlB,EAAqBA,CAArB,CAAN;AACH;AACD,oBAAIrX,YAAYnE,kBAAkBob,mBAAlB,GAAwC5X,GAAxC,CAAhB;AACA,oBAAI+Z,cAAc,IAAI9zB,IAAJ,EAAlB;AACA,oBAAI+zB,cAAcpY,KAAKqY,KAAL,CAAYF,YAAYG,WAAZ,EAAD,GAA8B,GAAzC,CAAlB;;AAEA/Z,8BAAY6Z,WAAZ,GAA0BrZ,SAA1B;;AAEA,oBAAIqW,oBAAoB7W,MAApB,EAA4B+Z,WAA5B,KAA4CH,YAAYG,WAAZ,EAAhD,EAA2E;AACvE/Z,mCAAY6Z,cAAc,CAA1B,IAA8BrZ,SAA9B;AACH;AACD,uBAAOqW,oBAAoB7W,MAApB,EAA4B+Z,WAA5B,EAAP;AACH,aArBF;AAsBCvpB,qBAtBD,qBAsBYqP,GAtBZ,EAsBiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAInX,OAAOuI,EAAE8oB,WAAF,GAAgBvB,QAAhB,EAAX;AACA,oBAAIX,UAAJ;;AAEA,oBAAInvB,IAAJ,EAAU;AACNmvB,wBAAInvB,KAAKH,MAAT;AACAG,2BAAOA,KAAKixB,SAAL,CAAe9B,IAAI,CAAnB,EAAsBA,CAAtB,CAAP;AACH;;AAED,uBAAOnvB,IAAP;AACH;AAjCF,SAhMa;AAmOhBsxB,WAAG;AACC;AACA5yB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAICsiB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;;AAKC/Y,oBAAQlD,kBAAkBob,mBAAlB,EALT;AAMCjnB,qBAND,qBAMYqP,GANZ,EAMiB;AACZ,oBAAM5O,IAAI4lB,oBAAoBhX,GAApB,CAAV;AACA,oBAAMnX,OAAOuI,EAAE8oB,WAAF,GAAgBvB,QAAhB,EAAb;;AAEA,uBAAO9vB,IAAP;AACH;AAXF;AAnOa,KAApB;;AAkPA,WAAO0vB,WAAP;AACH,CAvSD;;AAySA;;;;;;AAMA/b,kBAAkB4d,mBAAlB,GAAwC,YAAY;AAChD,QAAM7B,cAAc/b,kBAAkB0b,mBAAlB,EAApB;;AAEA,WAAO;AACHV,cAAMe,YAAYC,CADf;AAEH6B,iBAAS9B,YAAYP,CAFlB;AAGHsC,wBAAgB/B,YAAYK,CAHzB;AAIH2B,wBAAgBhC,YAAYM,CAJzB;AAKHpB,gBAAQc,YAAYO,CALjB;AAMHpB,gBAAQa,YAAYU,CANjB;AAOHuB,mBAAWjC,YAAYlX,CAPpB;AAQHoZ,kBAAUlC,YAAYiB,CARnB;AASHkB,sBAAcnC,YAAY7mB,CATvB;AAUHipB,qCAA6BpC,YAAYnnB,CAVtC;AAWHwpB,qBAAarC,YAAYjX,CAXtB;AAYHuZ,oBAAYtC,YAAYqB,CAZrB;AAaHkB,uBAAevC,YAAYvD,CAbxB;AAcH+F,oBAAYxC,YAAYsB,CAdrB;AAeHmB,mBAAWzC,YAAY4B;AAfpB,KAAP;AAiBH,CApBD;;AAsBA;;;;;;;AAOA3d,kBAAkBye,aAAlB,GAAkC,YAAY;AAC1C,QAAM1C,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAMgD,kBAAkB,SAAlBA,eAAkB,GAAa;AAAE;AACnC,YAAI1zB,IAAI,CAAR;AACA,YAAIktB,YAAJ;A
ACA,YAAIyG,oBAAJ;AACA,YAAMnD,IAAI,UAAKtvB,MAAf;;AAEA,eAAOlB,IAAIwwB,CAAX,EAAcxwB,GAAd,EAAmB;AACfktB,sCAAWltB,CAAX,yBAAWA,CAAX;AACA,oCAASA,CAAT,yBAASA,CAAT,GAAa;AACT2zB,8BAAczG,GAAd;AACH;AACJ;;AAED,YAAI,CAACyG,WAAL,EAAkB;AAAE,mBAAO,IAAP;AAAc;;AAElC,eAAOA,YAAY,CAAZ,EAAezb,MAAf,CAAsByb,YAAY,CAAZ,CAAtB,CAAP;AACH,KAhBD;;AAkBA,WAAO;AACH9D,cAAM,CAACkB,YAAYsB,CAAb,EAAgBtB,YAAY4B,CAA5B,EACFe,eADE,CADH;AAIH5D,eAAO,CAACiB,YAAYjX,CAAb,EAAgBiX,YAAYqB,CAA5B,EAA+BrB,YAAYvD,CAA3C,EACHkG,eADG,CAJJ;AAOH3D,aAAK,CAACgB,YAAYlX,CAAb,EAAgBkX,YAAYiB,CAA5B,EAA+BjB,YAAY7mB,CAA3C,EAA8C6mB,YAAYnnB,CAA1D,EACD8pB,eADC,CAPF;AAUH1D,cAAM,CAACe,YAAYC,CAAb,EAAgBD,YAAYP,CAA5B,EAA+BO,YAAYK,CAA3C,EAA8CL,YAAYM,CAA1D,EACF,UAAUuC,YAAV,EAAwBC,YAAxB,EAAsCC,SAAtC,EAAiDC,SAAjD,EAA4D;AACxD,gBAAIJ,oBAAJ;AACA,gBAAIK,eAAJ;AACA,gBAAIC,aAAJ;AACA,gBAAIzb,YAAJ;;AAEA,gBAAIqb,iBAAiBG,SAAUF,aAAaC,SAAxC,CAAJ,EAAyD;AACrD,oBAAIC,OAAO,CAAP,EAAU9b,MAAV,CAAiB8b,OAAO,CAAP,CAAjB,MAAgC,IAApC,EAA0C;AACtCC,2BAAO,IAAP;AACH;;AAEDN,8BAAcE,YAAd;AACH,aAND,MAMO,IAAIA,YAAJ,EAAkB;AACrBF,8BAAcE,YAAd;AACH,aAFM,MAEA;AACHF,8BAAcC,YAAd;AACH;;AAED,gBAAI,CAACD,WAAL,EAAkB;AAAE,uBAAO,IAAP;AAAc;;AAElCnb,kBAAMmb,YAAY,CAAZ,EAAezb,MAAf,CAAsByb,YAAY,CAAZ,CAAtB,CAAN;AACA,gBAAIM,IAAJ,EAAU;AACNzb,uBAAO,EAAP;AACH;AACD,mBAAOA,GAAP;AACH,SA1BC,CAVH;AAsCHyX,gBAAQ,CAACc,YAAYO,CAAb,EACJoC,eADI,CAtCL;AAyCHxD,gBAAQ,CAACa,YAAYU,CAAb,EACJiC,eADI;AAzCL,KAAP;AA6CH,CAjED;;AAmEA;;;;;AAKA1e,kBAAkBkf,UAAlB,GAA+B,UAAUj1B,MAAV,EAAkB;AAC7C,QAAMk1B,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAMoB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAM0D,gBAAgB1zB,OAAOmI,IAAP,CAAYkoB,WAAZ,CAAtB;AACA,QAAMsD,aAAa,EAAnB;AACA,QAAIr0B,UAAJ;AACA,QAAIs0B,oBAAJ;;AAEA,WAAO,CAACt0B,IAAIf,OAAOsL,OAAP,CAAe4pB,WAAf,EAA4Bn0B,IAAI,CAAhC,CAAL,KAA4C,CAAnD,EAAsD;AAClDs0B,sBAAcr1B,OAAOe,IAAI,CAAX,CAAd;AACA,YAAIo0B,cAAc7pB,OAAd,CAAsB+pB,WAAtB,MAAuC,CAAC,CAA5C,EAA+C;AAAE;AAAW;;AAE5DD,mBAAWvzB,IAAX,CAAgB;AACZ6N,mBAAO3O,CADK;AAEZgD,mBAAOsxB;AAFK,SAAhB;AAIH;;AAED,WAAOD,UAAP;AACH,CAnBD;;AAqBA;;;;;;AAMArf,kBAAkBuF,QAAlB,GAA6B,UAAUhZ,I
AAV,EAAgBtC,MAAhB,EAAwB;AACjD,QAAMs1B,QAAQ/E,oBAAoBjuB,IAApB,CAAd;AACA,QAAM8yB,aAAarf,kBAAkBkf,UAAlB,CAA6Bj1B,MAA7B,CAAnB;AACA,QAAM8xB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAI8D,eAAe9b,OAAOzZ,MAAP,CAAnB;AACA,QAAMk1B,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAI3sB,cAAJ;AACA,QAAIyxB,qBAAJ;AACA,QAAIz0B,UAAJ;AACA,QAAIwwB,UAAJ;;AAEA,SAAKxwB,IAAI,CAAJ,EAAOwwB,IAAI6D,WAAWnzB,MAA3B,EAAmClB,IAAIwwB,CAAvC,EAA0CxwB,GAA1C,EAA+C;AAC3CgD,gBAAQqxB,WAAWr0B,CAAX,EAAcgD,KAAtB;AACAyxB,uBAAe1D,YAAY/tB,KAAZ,EAAmBmG,SAAnB,CAA6BorB,KAA7B,CAAf;AACAC,uBAAeA,aAAapxB,OAAb,CAAqB,IAAIf,MAAJ,CAAW8xB,cAAcnxB,KAAzB,EAAgC,GAAhC,CAArB,EAA2DyxB,YAA3D,CAAf;AACH;;AAED,WAAOD,YAAP;AACH,CAlBD;;AAoBA;;;;;AAKAxf,kBAAkB0f,SAAlB,CAA4B71B,KAA5B,GAAoC,UAAU81B,aAAV,EAAyBnuB,OAAzB,EAAkC;AAClE,QAAMitB,gBAAgBze,kBAAkBye,aAAlB,EAAtB;AACA,QAAM/D,WAAW,KAAKkF,iBAAL,CAAuBD,aAAvB,CAAjB;AACA,QAAME,aAAa7f,kBAAkB4a,uBAArC;AACA,QAAMkF,UAAUtuB,WAAWA,QAAQsuB,OAAnC;AACA,QAAMC,aAAa,EAAnB;AACA,QAAMhsB,OAAO,EAAb;AACA,QAAIisB,oBAAJ;AACA,QAAIC,uBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAI1c,YAAJ;AACA,QAAIxY,UAAJ;AACA,QAAIm1B,cAAJ;AACA,QAAIC,oBAAJ;AACA,QAAI5E,UAAJ;AACA,QAAI7X,SAAS,EAAb;;AAEA,SAAKqc,WAAL,IAAoBvB,aAApB,EAAmC;AAC/B,YAAI,CAAC,GAAG4B,cAAH,CAAkB3rB,IAAlB,CAAuB+pB,aAAvB,EAAsCuB,WAAtC,CAAL,EAAyD;AAAE;AAAW;;AAEtEjsB,aAAK7H,MAAL,GAAc,CAAd;AACA+zB,yBAAiBxB,cAAcuB,WAAd,CAAjB;AACAE,qBAAaD,eAAeptB,MAAf,CAAsBotB,eAAe/zB,MAAf,GAAwB,CAA9C,EAAiD,CAAjD,EAAoD,CAApD,CAAb;;AAEA,aAAKlB,IAAI,CAAJ,EAAOwwB,IAAIyE,eAAe/zB,MAA/B,EAAuClB,IAAIwwB,CAA3C,EAA8CxwB,GAA9C,EAAmD;AAC/Cm1B,oBAAQF,eAAej1B,CAAf,CAAR;AACAwY,kBAAMkX,SAASyF,MAAMp1B,IAAf,CAAN;;AAEA,gBAAIyY,QAAQ3N,SAAZ,EAAuB;AACnB9B,qBAAKjI,IAAL,CAAU,IAAV;AACH,aAFD,MAEO;AACHiI,qBAAKjI,IAAL,CAAU,CAACq0B,KAAD,EAAQ3c,GAAR,CAAV;AACH;AACJ;;AAED4c,sBAAcF,WAAWI,KAAX,CAAiB,IAAjB,EAAuBvsB,IAAvB,CAAd;;AAEA,YAAI,CAACqsB,gBAAgBvqB,SAAhB,IAA6BuqB,gBAAgB,IAA9C,KAAuD,CAACN,OAA5D,EAAqE;AACjE;AACH;;AAEDC,mBAAWF,WAAWG,WAAX,CAAX,IAAsCI,WAAtC;AACH;;AAED,QAAIL,WAAW7zB,MAAX,IAAqB,KAAKq0B,eAAL,CAAqBR,WAAW7zB,MAAhC,CAAzB,EACC;AACGyX,eAAOkN,OAAP,
CAAekP,WAAW,CAAX,CAAf,EAA8B,CAA9B,EAAiC,CAAjC;AAAsC,KAF1C,MAGK;AACDpc,eAAOkN,OAAP,eAAkBkP,UAAlB;AACH;;AAED,WAAOpc,MAAP;AACH,CApDD;;AAsDA;;;;;AAKA3D,kBAAkB0f,SAAlB,CAA4BE,iBAA5B,GAAgD,UAAUD,aAAV,EAAyB;AACrE,QAAM11B,SAAS,KAAKA,MAApB;AACA,QAAM8xB,cAAc/b,kBAAkB0b,mBAAlB,EAApB;AACA,QAAMyD,cAAcnf,kBAAkB2a,YAAtC;AACA,QAAM0E,aAAarf,kBAAkBkf,UAAlB,CAA6Bj1B,MAA7B,CAAnB;AACA,QAAMu2B,WAAW,EAAjB;;AAEA,QAAIC,4BAAJ;AACA,QAAIC,eAAJ;AACA,QAAIC,iBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAIC,oBAAJ;;AAEA,QAAIrF,UAAJ;AACA,QAAIxwB,UAAJ;;AAEA61B,kBAAcnd,OAAOzZ,MAAP,CAAd;;AAEA,QAAM62B,WAAWzB,WAAWv0B,GAAX,CAAe;AAAA,eAAO0N,IAAIxK,KAAX;AAAA,KAAf,CAAjB;AACA,QAAM+yB,mBAAmB1B,WAAWnzB,MAApC;AACA,SAAKlB,IAAI+1B,mBAAmB,CAA5B,EAA+B/1B,KAAK,CAApC,EAAuCA,GAAvC,EAA4C;AACxC21B,mBAAWtB,WAAWr0B,CAAX,EAAc2O,KAAzB;;AAEA,YAAIgnB,WAAW,CAAX,KAAiBE,YAAY30B,MAAZ,GAAqB,CAA1C,EAA6C;AACzCu0B,kCAAsBE,QAAtB;AACA;AACH;;AAED,YAAIF,wBAAwB5qB,SAA5B,EAAuC;AACnC4qB,kCAAsBI,YAAY30B,MAAlC;AACH;;AAED00B,qBAAaC,YAAYvD,SAAZ,CAAsBqD,WAAW,CAAjC,EAAoCF,mBAApC,CAAb;AACAI,sBAAcA,YAAYvD,SAAZ,CAAsB,CAAtB,EAAyBqD,WAAW,CAApC,IACVtzB,OAAOotB,MAAP,CAAcmG,UAAd,CADU,GAEVC,YAAYvD,SAAZ,CAAsBmD,mBAAtB,EAA2CI,YAAY30B,MAAvD,CAFJ;;AAIAu0B,8BAAsBE,QAAtB;AACH;;AAED,SAAK31B,IAAI,CAAT,EAAYA,IAAI+1B,gBAAhB,EAAkC/1B,GAAlC,EAAuC;AACnC01B,iBAASrB,WAAWr0B,CAAX,CAAT;AACA61B,sBAAcA,YAAYzyB,OAAZ,CAAoB+wB,cAAcuB,OAAO1yB,KAAzC,EAAgD+tB,YAAY2E,OAAO1yB,KAAnB,EAA0BiuB,OAA1B,EAAhD,CAAd;AACH;;AAED,QAAM+E,gBAAgBrB,cAAc7b,KAAd,CAAoB,IAAIzW,MAAJ,CAAWwzB,WAAX,CAApB,KAAgD,EAAtE;AACAG,kBAAc7H,KAAd;;AAEA,SAAKnuB,IAAI,CAAJ,EAAOwwB,IAAIsF,SAAS50B,MAAzB,EAAiClB,IAAIwwB,CAArC,EAAwCxwB,GAAxC,EAA6C;AACzCw1B,iBAASM,SAAS91B,CAAT,CAAT,IAAwBg2B,cAAch2B,CAAd,CAAxB;AACH;AACD,WAAOw1B,QAAP;AACH,CApDD;;AAsDA;;;;;AAKAxgB,kBAAkB0f,SAAlB,CAA4Bnb,aAA5B,GAA4C,UAAUob,aAAV,EAAyB;AACjE,QAAIpzB,OAAO,IAAX;AACA,QAAI2J,OAAOmP,QAAP,CAAgBsa,aAAhB,CAAJ,EAAoC;AAChCpzB,eAAO,IAAI9C,IAAJ,CAASk2B,aAAT,CAAP;AACH,KAFD,MAEO,IAAI,CAAC,KAAK11B,MAAN,IAAgBR,KAAKI,KAAL,CAAW81B,aAAX,CAApB,EAA+C;AAClDpzB,eAAO,IAAI9C,IAAJ,CAASk2B
,aAAT,CAAP;AACH,KAFM,MAGF;AACD,YAAMjF,WAAW,KAAKA,QAAL,GAAgB,KAAK7wB,KAAL,CAAW81B,aAAX,CAAjC;AACA,YAAIjF,SAASxuB,MAAb,EAAqB;AACjB,iBAAKoY,UAAL,sCAAsB7a,IAAtB,mCAA8BixB,QAA9B;AACAnuB,mBAAO,KAAK+X,UAAZ;AACH;AACJ;AACD,WAAO/X,IAAP;AACH,CAfD;;AAiBAyT,kBAAkB0f,SAAlB,CAA4Ba,eAA5B,GAA8C,UAAS3X,GAAT,EAAc;AACxD,WAAOA,QAAQ,CAAR,IAAa,KAAK3e,MAAL,CAAY6Z,KAAZ,CAAkB,MAAlB,EAA0B5X,MAA9C;AACH,CAFD;;AAIA;;;;;;AAMA8T,kBAAkB0f,SAAlB,CAA4Bna,QAA5B,GAAuC,UAAUtb,MAAV,EAAkB01B,aAAlB,EAAiC;AACpE,QAAIrb,mBAAJ;;AAEA,QAAIqb,aAAJ,EAAmB;AACfrb,qBAAa,KAAKA,UAAL,GAAkB,KAAKC,aAAL,CAAmBob,aAAnB,CAA/B;AACH,KAFD,MAEO,IAAI,EAAErb,aAAa,KAAKA,UAApB,CAAJ,EAAqC;AACxCA,qBAAa,KAAKC,aAAL,CAAmBob,aAAnB,CAAb;AACH;;AAED,WAAO3f,kBAAkBuF,QAAlB,CAA2BjB,UAA3B,EAAuCra,MAAvC,CAAP;AACH,CAVD;;;;;;;;;;;;;;ACluBA;AAAA;;;;;;AAMe,yEAACqH,IAAD,EAAU;AACrB,QAAIuQ,MAAM3L,OAAO4L,iBAAjB;AACA,QAAIC,MAAM7L,OAAO8L,iBAAjB;;AAEA1Q,SAAK1F,OAAL,CAAa,UAACgJ,CAAD,EAAO;AAChB,YAAIA,IAAIiN,GAAR,EAAa;AACTA,kBAAMjN,CAAN;AACH;AACD,YAAIA,IAAImN,GAAR,EAAa;AACTA,kBAAMnN,CAAN;AACH;AACJ,KAPD;;AASA,WAAO,CAACiN,GAAD,EAAME,GAAN,CAAP;AACH,CAdD,E;;;;;;;;;;;;;;;;ACNA;AACA,IAAMkf,eAAe,QAArB;AACA,IAAMC,gBAAgBx1B,OAAOg0B,SAAP,CAAiBvD,QAAvC;AACA,IAAMgF,cAAc,iBAApB;AACA,IAAMC,aAAa,gBAAnB;;AAEA,SAASC,cAAT,CAAwB7oB,GAAxB,EAA6B8oB,SAA7B,EAAwC;AACpC,QAAIt2B,IAAIs2B,UAAUp1B,MAAlB;AACA,QAAIq1B,SAAS,CAAC,CAAd;;AAEA,WAAOv2B,CAAP,EAAU;AACN,YAAIwN,QAAQ8oB,UAAUt2B,CAAV,CAAZ,EAA0B;AACtBu2B,qBAASv2B,CAAT;AACA,mBAAOu2B,MAAP;AACH;AACDv2B,aAAK,CAAL;AACH;;AAED,WAAOu2B,MAAP;AACH;;AAED,SAASrL,KAAT,CAAesL,IAAf,EAAqBC,IAArB,EAA2BC,SAA3B,EAAsCC,MAAtC,EAA8CC,MAA9C,EAAsD;AAClD,QAAIhuB,IAAJ,EACIiuB,MADJ,EAEIC,MAFJ,EAGIxuB,GAHJ,EAIIyuB,IAJJ;AAKA;AACA;AACA;;AAEA,QAAI,CAACH,MAAL,EAAa;AACTD,iBAAS,CAACH,IAAD,CAAT;AACAI,iBAAS,CAACH,IAAD,CAAT;AACH,KAHD,MAIK;AACDE,eAAO71B,IAAP,CAAY01B,IAAZ;AACAI,eAAO91B,IAAP,CAAY21B,IAAZ;AACH;;AAED,QAAIA,gBAAgBt1B,KAApB,EAA2B;AACvB,aAAKyH,OAAO,CAAZ,EAAeA,OAAO6tB,KAAKv1B,MAA3B,EAAmC0H,QAAQ,CAA3C,EAA8C;AAC1C,gBAAI;AACAiuB,yBAASL,KAAK5tB,IAAL,CAAT;AACAkuB,yBAASL,
KAAK7tB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAI,QAAO4sB,MAAP,yCAAOA,MAAP,OAAkBb,YAAtB,EAAoC;AAChC,oBAAI,EAAES,aAAaI,WAAWjsB,SAA1B,CAAJ,EAA0C;AACtC2rB,yBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ,aAJD,MAKK;AACD,oBAAID,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,6BAASL,KAAK5tB,IAAL,IAAakuB,kBAAkB31B,KAAlB,GAA0B,EAA1B,GAA+B,EAArD;AACH;AACD41B,uBAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,oBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,6BAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,iBAFD,MAGK;AACD7L,0BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ;AACJ;AACJ,KA5BD,MA6BK;AACD,aAAKhuB,IAAL,IAAa6tB,IAAb,EAAmB;AACf,gBAAI;AACAI,yBAASL,KAAK5tB,IAAL,CAAT;AACAkuB,yBAASL,KAAK7tB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAI4sB,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBb,YAAzC,EAAuD;AACnD;AACA;AACA;AACA;AACA3tB,sBAAM4tB,cAAcxsB,IAAd,CAAmBotB,MAAnB,CAAN;AACA,oBAAIxuB,QAAQ6tB,WAAZ,EAAyB;AACrB,wBAAIU,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,iCAASL,KAAK5tB,IAAL,IAAa,EAAtB;AACH;AACDmuB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXD,MAYK,IAAItuB,QAAQ8tB,UAAZ,EAAwB;AACzB,wBAAIS,WAAW,IAAX,IAAmB,EAAEA,kBAAkB11B,KAApB,CAAvB,EAAmD;AAC/C01B,iCAASL,KAAK5tB,IAAL,IAAa,EAAtB;AACH;AACDmuB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAK5tB,IAAL,IAAa+tB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXI,MAYA;AACDJ,yBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ,aAjCD,MAkCK;AACD,oBAAIJ,aAAaI,WAAWjsB,SAA5B,EAAuC;AACnC;AACH;AACD2rB,qBAAK5tB,IAAL,IAAakuB,MAAb;AACH;AACJ;AACJ;AACD,WAAON,IAAP;AACH;;AAGD,SAASnW,OAAT,CAAkBmW,IAAlB,EAAwBC,IAAxB,EAA8BC,SAA9B,EAAyC;AACrC;AACA,QAAI,QAAOF,IAAP,yCAAOA,IAAP,OAAgBP,YAAhB,IAAgC,QAAOQ,IAAP,yCAAOA,IAAP,OAAgBR,YAApD,
EAAkE;AAC9D,eAAO,IAAP;AACH;;AAED,QAAI,QAAOQ,IAAP,yCAAOA,IAAP,OAAgBR,YAAhB,IAAgCQ,SAAS,IAA7C,EAAmD;AAC/C,eAAOD,IAAP;AACH;;AAED,QAAI,QAAOA,IAAP,yCAAOA,IAAP,OAAgBP,YAApB,EAAkC;AAC9BO,eAAOC,gBAAgBt1B,KAAhB,GAAwB,EAAxB,GAA6B,EAApC;AACH;AACD+pB,UAAMsL,IAAN,EAAYC,IAAZ,EAAkBC,SAAlB;AACA,WAAOF,IAAP;AACH;;;;;;;;;;;;;;;;;;;;;;;;;;;AC5ID;;AAEA;;;;;;AAMO,SAASnvB,OAAT,CAAkBmR,GAAlB,EAAuB;AAC1B,WAAOrX,MAAMkG,OAAN,CAAcmR,GAAd,CAAP;AACH;;AAED;;;;;;AAMO,SAASwe,QAAT,CAAmBxe,GAAnB,EAAwB;AAC3B,WAAOA,QAAQ9X,OAAO8X,GAAP,CAAf;AACH;;AAED;;;;;;AAMO,SAASye,QAAT,CAAmBze,GAAnB,EAAwB;AAC3B,WAAO,OAAOA,GAAP,KAAe,QAAtB;AACH;;AAED;;;;;;AAMO,SAAS8U,UAAT,CAAqB9U,GAArB,EAA0B;AAC7B,WAAO,OAAOA,GAAP,KAAe,UAAtB;AACH;;AAED;;;;;;AAMO,SAAS0e,YAAT,CAAuB5wB,IAAvB,EAA6B;AAChC,wCAAW,IAAIkQ,GAAJ,CAAQlQ,IAAR,CAAX;AACH;;AAEM,IAAMuP,cAAc,SAAdA,WAAc;AAAA,mBAAY,IAAIpX,IAAJ,GAAW+a,OAAX,EAAZ,GAAmCY,KAAK+c,KAAL,CAAW/c,KAAKgd,MAAL,KAAgB,KAA3B,CAAnC;AAAA,CAApB;;AAEP;;;;;;;AAOO,SAAS5O,UAAT,CAAoB6O,IAApB,EAA0BC,IAA1B,EAAgC;AACnC,QAAI,CAACjwB,QAAQgwB,IAAR,CAAD,IAAkB,CAAChwB,QAAQiwB,IAAR,CAAvB,EAAsC;AAClC,eAAOD,SAASC,IAAhB;AACH;;AAED,QAAID,KAAKn2B,MAAL,KAAgBo2B,KAAKp2B,MAAzB,EAAiC;AAC7B,eAAO,KAAP;AACH;;AAED,SAAK,IAAIlB,IAAI,CAAb,EAAgBA,IAAIq3B,KAAKn2B,MAAzB,EAAiClB,GAAjC,EAAsC;AAClC,YAAIq3B,KAAKr3B,CAAL,MAAYs3B,KAAKt3B,CAAL,CAAhB,EAAyB;AACrB,mBAAO,KAAP;AACH;AACJ;;AAED,WAAO,IAAP;AACH;;AAED;;;;;;AAMO,SAASuY,YAAT,CAAsBC,GAAtB,EAA2B;AAC9B,WAAOA,GAAP;AACH;;AAED;;;;;;AAMO,IAAMrR,mBAAmB,SAAnBA,gBAAmB,CAACb,IAAD,EAAU;AACtC,QAAI2wB,SAAS3wB,IAAT,CAAJ,EAAoB;AAChB,eAAOF,iDAAUA,CAACO,OAAlB;AACH,KAFD,MAEO,IAAIU,QAAQf,IAAR,KAAiBe,QAAQf,KAAK,CAAL,CAAR,CAArB,EAAuC;AAC1C,eAAOF,iDAAUA,CAACK,OAAlB;AACH,KAFM,MAEA,IAAIY,QAAQf,IAAR,MAAkBA,KAAKpF,MAAL,KAAgB,CAAhB,IAAqB81B,SAAS1wB,KAAK,CAAL,CAAT,CAAvC,CAAJ,EAA+D;AAClE,eAAOF,iDAAUA,CAACS,SAAlB;AACH;AACD,WAAO,IAAP;AACH,CATM,C;;;;;;;;;;;;AC/FP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA
;AACA;AACA;;;;;;;;;;;;;;;;;;;ACHA;;AAEA;;;;;;;;;IAQM0wB,Y;AACF,4BAAe;AAAA;;AAAA;;AACX,aAAKpyB,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKD,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6B0kB,oDAA7B;;AAEA3pB,eAAOga,OAAP,CAAesP,gDAAf,EAAuBppB,OAAvB,CAA+B,UAAC1C,GAAD,EAAS;AACpC,kBAAKiH,KAAL,CAAWQ,GAAX,CAAezH,IAAI,CAAJ,CAAf,EAAuBA,IAAI,CAAJ,CAAvB;AACH,SAFD;AAGH;;AAED;;;;;;;;;;;;;yCAS2B;AACvB,gBAAI,CAAC,UAAOgD,MAAZ,EAAoB;AAChB,uBAAO,KAAKiE,KAAL,CAAWc,GAAX,CAAe,YAAf,CAAP;AACH;;AAED,gBAAIuxB,0DAAJ;;AAEA,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,qBAAKryB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6B6xB,OAA7B;AACH,aAFD,MAEO;AACHA,0BAAU9e,OAAO8e,OAAP,CAAV;AACA,oBAAI92B,OAAOmI,IAAP,CAAYmhB,gDAAZ,EAAoBzf,OAApB,CAA4BitB,OAA5B,MAAyC,CAAC,CAA9C,EAAiD;AAC7C,yBAAKryB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6BqkB,gDAAMA,CAACwN,OAAP,CAA7B;AACH,iBAFD,MAEO;AACH,0BAAM,IAAIxwB,KAAJ,cAAqBwwB,OAArB,4BAAN;AACH;AACJ;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iCAgCUz3B,I,EAAMy3B,O,EAAS;AAAA;;AACrB,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,sBAAM,IAAIxwB,KAAJ,CAAU,8BAAV,CAAN;AACH;;AAEDjH,mBAAO2Y,OAAO3Y,IAAP,CAAP;AACA,iBAAKoF,KAAL,CAAWQ,GAAX,CAAe5F,IAAf,EAAqBy3B,OAArB;;AAEA,mBAAO,YAAM;AAAE,uBAAKC,YAAL,CAAkB13B,IAAlB;AAA0B,aAAzC;AACH;;;qCAEaA,I,EAAM;AAChB,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,qBAAKoF,KAAL,CAAWY,MAAX,CAAkBhG,IAAlB;AACH;AACJ;;;gCAEQA,I,EAAM;AACX,gBAAIA,gBAAgBF,QAApB,EAA8B;AAC1B,uBAAOE,IAAP;AACH;AACD,mBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;;;;;;AAGL,IAAMkM,eAAgB,YAAY;AAC9B,QAAI9G,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjB,YAAIhB,UAAU,IAAd,EAAoB;AAChBA,oBAAQ,IAAIoyB,YAAJ,EAAR;AACH;AACD,eAAOpyB,KAAP;AACH;AACD,WAAOgB,UAAP;AACH,CAVqB,EAAtB;;AAYe8F,2EAAf,E;;;;;;;;;;;;;;;;;;ACtHA;;AAEA;;;;;;;IAMMwB,K;;AAEJ;;;;;;AAME,mBAAatP,KAAb,EAAoB0c,QAApB,EAA8B1S,KAA9B,EAAqC;AAAA;;AACjC,YAAMyS,iBAAiBuJ,qEAAqBA,CAAChc,KAAtB,EAA6BhK,KAA7B,CAAvB;;AAEAuC,eAAOg3B,gBAAP,CAAwB,IAAxB,EAA8B;AAC1BjT,oBAAQ;AACJkT,4BAAY,KADR;AAEJC,8BAAc,KAFV;AAGJC,0BAAU,KAHN;AAIJ15B;AAJI,aADkB;AAO1B25B,6BAAiB;AACbH,4BAAY,KADC;AAEbC,8BAA
c,KAFD;AAGbC,0BAAU,KAHG;AAIb15B,uBAAOyc;AAJM,aAPS;AAa1Bmd,4BAAgB;AACZJ,4BAAY,KADA;AAEZC,8BAAc,KAFF;AAGZC,0BAAU,KAHE;AAIZ15B,uBAAO0c;AAJK;AAbU,SAA9B;;AAqBA,aAAK1S,KAAL,GAAaA,KAAb;AACH;;AAEH;;;;;;;;;;;AAuBA;;;;;;;mCAOc;AACR,mBAAOuQ,OAAO,KAAKva,KAAZ,CAAP;AACH;;AAEH;;;;;;;;;kCAMa;AACP,mBAAO,KAAKA,KAAZ;AACH;;;4BArCY;AACT,mBAAO,KAAKsmB,MAAZ;AACH;;AAED;;;;;;4BAGsB;AAClB,mBAAO,KAAKqT,eAAZ;AACH;;AAED;;;;;;4BAGqB;AACjB,mBAAO,KAAKC,cAAZ;AACH;;;;;;AAwBUtqB,oEAAf,E","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n 
\t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = \"./src/index.js\");\n","export default function autoType(object) {\n for (var 
key in object) {\n var value = object[key].trim(), number;\n if (!value) value = null;\n else if (value === \"true\") value = true;\n else if (value === \"false\") value = false;\n else if (value === \"NaN\") value = NaN;\n else if (!isNaN(number = +value)) value = number;\n else if (/^([-+]\\d{2})?\\d{4}(-\\d{2}(-\\d{2})?)?(T\\d{2}:\\d{2}(:\\d{2}(\\.\\d{3})?)?(Z|[-+]\\d{2}:\\d{2})?)?$/.test(value)) value = new Date(value);\n else continue;\n object[key] = value;\n }\n return object;\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? 
\"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) 
continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? 
\"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","export {default as dsvFormat} from \"./dsv\";\nexport {csvParse, csvParseRows, csvFormat, csvFormatBody, csvFormatRows} from \"./csv\";\nexport {tsvParse, tsvParseRows, tsvFormat, tsvFormatBody, tsvFormatRows} from \"./tsv\";\nexport {default as autoType} from \"./autoType\";\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataConverter from './model/dataConverter';\nimport { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n * Sets the given converters in the store and returns the store\n * @param 
{Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n * Registers a Converter of type DataConverter\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n * Rempves a converter from store\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n 
convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","export { default as DSVStringConverter } from './dsvStringConverter';\nexport { default as JSONConverter } from './jsonConverter';\nexport { default as DSVArrayConverter } from './dsvArrayConverter';\nexport { default as AutoDataConverter } from './autoConverter';\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import converterStore from './dataConverterStore';\nimport DataConverter from './model/dataConverter';\n\nexport { DataConverter, converterStore };\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param 
{Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * 
@param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n 
getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. 
DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. 
All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. 
The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) 
{\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the 
box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n 
serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n 
/**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 
'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * FilteringMode determines if resultant DataModel should be created from selection set or rejection set.\n *\n * The following modes are available\n * - `NORMAL`: Only entries from selection set are included in the resulatant DataModel instance\n * - `INVERSE`: Only entries from rejection set are included in the resulatant DataModel instance\n * - ALL: Both the entries from selection and rejection set are returned in two different DataModel instance\n */\n\nexport { default as DataFormat } from './data-format';\nexport { default as DimensionSubtype } from './dimension-subtype';\nexport { default as MeasureSubtype } from './measure-subtype';\nexport { default as FieldType } from './field-type';\nexport { default as FilteringMode } from './filtering-mode';\nexport { default as GROUP_BY_FUNCTIONS } from './group-by-functions';\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport 
pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n 
this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * 
Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport ContinuousParser from '../parsers/continuous-parser';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n\n static parser() {\n return new 
ContinuousParser();\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField 
from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} 
Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","export { default as Dimension } from './dimension';\nexport { default as 
Measure } from './measure';\nexport { default as FieldParser } from './parsers/field-parser';\nexport { default as fieldRegistry } from './field-registry';\nexport { columnMajor } from '../utils';\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? 
`${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same 
data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n 
data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects;\n\n const selectorHelperFn = index => selectFn(\n cachedValueObjects[index],\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n 
clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nconst keyFn = (arr, fields, idx, rowId) => {\n const val = fields[arr[idx]].internalValue;\n return arr[idx] === ROW_ID ? rowId : val;\n};\n\nconst boundsChecker = {\n [MeasureSubtype.CONTINUOUS]: (val, domain) => {\n const domainArr = domain[0] instanceof Array ? domain : [domain];\n return domainArr.some(dom => val >= dom[0] && val <= dom[1]);\n }\n};\n\nconst isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain);\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((criteria = {}) => {\n const { identifiers = [[], []], range } = criteria;\n const [fieldNames = [], values = []] = identifiers;\n const dLen = fieldNames.length;\n const valuesMap = {};\n\n if (dLen) {\n for (let i = 1, len = identifiers.length; i < len; i++) {\n const row = identifiers[i];\n const key = row.join();\n valuesMap[key] = 1;\n }\n }\n const rangeKeys = Object.keys(range || {});\n return values.length || rangeKeys.length ? (fields, i) => {\n const present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true;\n\n if (filterByMeasure) {\n return rangeKeys.every((field) => {\n const val = fields[field].internalValue;\n return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype);\n }) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n 
persistDerivations(\n sourceDm,\n clonedDm,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) 
{\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = 
formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = 
selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = 
[{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, 
propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the 
numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? DataModel.default : DataModel;\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = 
(start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. 
{@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n 
currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if 
(commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === 
JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user 
can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // 
For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from 
'../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n 
[LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = 
defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // 
reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","export { createBinnedFieldData } from './bucket-creator';\nexport { compose, bin, select, project, groupBy as groupby } from './compose';\nexport { calculateVariable, sort } from './pure-operators';\nexport { crossProduct } from './cross-product';\nexport { dataBuilder } from './data-builder';\nexport { difference } from './difference';\nexport { getCommonSchema } from './get-common-schema';\nexport { defReducer, fnList } from './group-by-function';\nexport { groupBy, getFieldArr, getReducerObj } from './group-by';\nexport { mergeSort } from './merge-sort';\nexport { naturalJoinFilter } from './natural-join-filter-function';\nexport { naturalJoin } from './natural-join';\nexport { leftOuterJoin, rightOuterJoin, fullOuterJoin } from './outer-join';\nexport { rowDiffsetIterator } from './row-diffset-iterator';\nexport { union } from './union';\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which 
needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return 
crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 
'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? -1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 
'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = 
fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, 
schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = 
'';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n 
this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. 
This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is 
filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. 
This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * Generates domain for measure field.\n *\n * @param {Array} data - The array of data.\n * @return {Array} Returns the measure domain.\n */\nexport default (data) => {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n data.forEach((d) => {\n if (d < min) {\n min = d;\n 
}\n if (d > max) {\n max = d;\n }\n });\n\n return [min, max];\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","export { default as DateTimeFormatter } from './date-time-formatter';\nexport { default as columnMajor } from './column-major';\nexport { default as generateMeasureDomain } from './domain-generator';\nexport { default as extend2 } from './extend2';\nexport * from './helper';\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. 
These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n"],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./node_modules/d3-dsv/src/autoType.js","webpack://DataModel/./node_modules/d3-dsv/src/csv.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./node_modules/d3-dsv/src/index.js","webpack://DataModel/./node_modules/d3-dsv/src/tsv.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/converter/defaultConverters/autoConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/defaultConverters/index.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/index.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/enums/index.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/dimension/index.js","we
bpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/helper.js","webpack://DataModel/./src/fields/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/index.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/index.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/natural-join.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/operator/pure-operators.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/domain-generator.js","webpack://DataModel/./src/utils/ex
tend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/utils/index.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/value.js"],"names":["autoType","object","key","value","trim","number","NaN","isNaN","test","Date","csv","dsv","csvParse","parse","csvParseRows","parseRows","csvFormat","format","csvFormatBody","formatBody","csvFormatRows","formatRows","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","Function","map","name","i","JSON","stringify","join","customConverter","f","row","inferColumns","rows","columnSet","Object","create","forEach","column","push","pad","width","s","length","Array","formatYear","year","formatDate","date","hours","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","RegExp","DELIMITER","charCodeAt","text","convert","N","I","n","t","eof","eol","token","j","c","slice","replace","preformatBody","formatValue","concat","formatRow","tsv","tsvParse","tsvParseRows","tsvFormat","tsvFormatBody","tsvFormatRows","PROPAGATION","ROW_ID","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","AND","OR","DataConverterStore","store","Map","converters","_getDefaultConverters","DSVStringConverter","DSVArrayConverter","JSONConverter","AutoDataConverter","set","converter","type","DataConverter","delete","has","get","converterStore","getStore","DataFormat","AUTO","data","schema","options","DSV_ARR","DSVArr","DSV_STR","DSVStr","FLAT_JSON","FlatJSON","_type","Error","Auto","dataFormat","detectDataFormat","arr","isArray","defaultOption","firstRowHeader","schemaFields","unitSchema","assign","columnMajor","headers","splice","headerMap","reduce","acc","h","fields","field","schemaField","headIndex","str","fieldSeparator","d3Dsv","header","insertionIndex","schemaFieldsName","item
","keys","DataModel","args","_onPropagation","defOptions","order","formatter","withUid","getAllFields","sort","getPartialFieldspace","dataGenerated","dataBuilder","call","_rowDiffset","d","_colIdentifier","columnWise","addUid","uids","fieldNames","e","fmtFieldNames","fmtFieldIdx","next","idx","indexOf","elem","fIdx","fmtFn","datum","datumIdx","undefined","rowDiffset","ids","diffSets","split","Number","start","end","fill","_","fieldsArr","reducers","config","saveChild","groupByString","params","newDataModel","groupBy","persistDerivations","defaultReducer","reducerStore","setParent","sortingDetails","rawData","getData","dataInCSVArr","sortedDm","constructor","_dataFormat","getFieldspace","colData","formattedData","rowsCount","serializedData","rowIdx","colIdx","fieldName","partialFieldspace","_partialFieldspace","cachedValueObjects","_cachedValueObjects","partialField","fieldsObj","obj","Value","fieldIndex","findIndex","fieldinst","_cachedFieldsObj","_cachedDimension","_cachedMeasure","__calculateFieldspace","calculateFieldsConfig","dependency","sanitizeUnitSchema","replaceVar","fieldsConfig","getFieldsConfig","depVars","retrieveFn","depFieldIndices","fieldSpec","index","clone","fs","suppliedFields","cachedStore","cloneProvider","detachedRoot","computedValues","rowDiffsetIterator","fieldsData","createFields","addField","identifiers","addToNameSpace","propConfig","isMutableAction","propagationSourceId","sourceId","payload","rootModel","getRootDataModel","propagationNameSpace","_propagationNameSpace","rootGroupByModel","getRootGroupByModel","rootModels","groupByModel","model","addToPropNamespace","propagateToAllDataModels","propagationSource","propagateImmutableActions","eventName","callback","propModel","propListeners","fn","measureFieldName","binFieldName","measureField","createBinnedFieldData","binnedData","bins","binField","FieldType","DIMENSION","subtype","DimensionSubtype","BINNED","serialize","getSchema","dimensionArr","reducerFn","defConfig","mode","FilteringMode
","NORMAL","splitWithSelect","uniqueFields","commonFields","fieldConfig","allFields","normalizedProjFieldSets","fieldSet","getNormalizedProFields","splitWithProject","InvalidAwareTypes","invalidAwareVals","fieldRegistry","Relation","CATEGORICAL","TEMPORAL","MEASURE","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","MeasureSubtype","CONTINUOUS","Operators","compose","bin","select","project","calculateVariable","crossProduct","difference","naturalJoin","leftOuterJoin","rightOuterJoin","fullOuterJoin","union","version","pkg","Stats","DateTimeFormatter","FieldsUtility","enums","createUnitField","BUILDER","build","createUnitFieldFromPartial","dataColumn","headersObj","fieldStore","createNamespace","fieldArr","dataId","getUniqueId","getMeasure","measureFields","getDimension","dimensionFields","Binned","binsArr","BinnedParser","Dimension","Categorical","hash","Set","domain","add","CategoricalParser","Continuous","calculateContinuousDomain","ContinuousParser","Measure","_cachedDomain","calculateDataDomain","Field","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","Temporal","description","displayName","builder","_params","_context","PartialField","parser","min","POSITIVE_INFINITY","max","NEGATIVE_INFINITY","unit","defAggFn","defaultReducerName","numberFormat","formatNumber","val","regex","String","result","isInvalid","matched","match","parseFloat","NA","getInvalidType","FieldParser","parsedVal","TemporalParser","_dtf","nativeDate","getNativeDate","getTime","_sanitize","_cachedMinDiff","sortedData","filter","a","b","arrLn","minDiff","prevDatum","nextDatum","processedCount","Math","isFinite","parsedDatum","formatAs","prepareSelectionData","resp","entries","prepareJoinData","formattedValue","rawValue","updateFields","fieldStoreName","colIdentifier","collID","partialFieldMap","newFields","coll","persistCurrentDerivation","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorD
erivation","sourceDm","newDm","_ancestorDerivation","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastInsertedValue","li","selectRowDiffsetIterator","checker","lastInsertedValueSel","lastInsertedValueRej","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","fieldStoreObj","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","selectorHelperFn","cloneWithAllFields","getKey","rowId","len","keyFn","internalValue","domainChecker","domainArr","some","dom","boundsChecker","isWithinDomain","fieldType","filterPropagationModel","propModels","fns","filterByDim","filterByMeasure","clonedModel","modelFieldsConfig","range","values","indices","def","dLen","valuesMap","rangeKeys","hasData","present","every","filteredModel","clonedDMs","cloned","derivation","derivationFormula","addDiffsetToClonedDm","selectConfig","cloneWithSelect","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithProject","projField","projectionSet","actualProjField","projFieldSet","projFields","extend2","validateUnitSchema","sanitizeAndValidateSchema","resolveFieldName","dataHeader","fieldNameAs","as","updateData","relation","defaultConfig","nameSpace","valueObjects","rawFieldsData","formattedFieldsData","fieldInSchema","getDerivationArguments","applyExistingOperationOnModel","dataModel","derivations","getDerivations","selectionModel","getFilteredModel","path","propagateIdentifiers","propModelInf","excludeModels","criterias","propagate","handlePropagation","children","_children","child","matchingCriteria","groupedModel","_parent","find","getPathToRootModel","propagationInf","propagateToSource","filterFn","entry","addGroupedModel","conf","crit","actionCriterias","mutableActions","filteredCriteria","sourceActionCriterias","actionInf","actionConf","applyOnSource","action","models","sourceIdentifiers","inf","propagationModel","reverse","immutableActions","filterImmutableAction","crit
eriaModel","sourceNamespace","normalizedProjField","search","from","getNumberFormattedVal","require","module","exports","default","_invalidAwareValsMap","_value","NULL","NIL","invalid","nil","null","generateBuckets","binSize","buckets","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","binsCount","dMin","dMax","ceil","abs","unshift","dm","operations","currentDM","firstChild","dispose","defaultFilterFn","dm1","dm2","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","getCommonSchema","tmpSchema","rowAdded","rowPosition","ii","tuple","userArg","cloneProvider1","cloneProvider2","dm1Fields","dm2Fields","tupleObj","cellVal","iii","retObj","reqSorting","tmpDataArr","colIArr","colName","insertInd","sortData","tmpData","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","isArrEqual","prepareDataHelper","addData","hashData","schemaName","fs1","fs2","retArr","fs1Arr","getFilteredValues","sum","filteredNumber","totalSum","curr","avg","filteredValues","first","last","count","variance","mean","num","std","sqrt","fnList","getFieldArr","dimensions","getReducerObj","measures","defReducer","measureName","resolve","existingDataModel","sFieldArr","reducerObj","dbName","measureArr","hashMap","rowCount","defSortFn","a1","b1","merge","lo","mid","hi","sortFn","mainArr","auxArr","mergeSort","naturalJoinFilter","commonSchemaArr","retainTuple","dataModel1","dataModel2","rowDiffArr","diffStr","diffStsArr","getSortFn","dataType","sortType","retFunc","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","toLowerCase","groupData","groupedData","fieldVal","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","arg","label","applyStandardSort","sortMeta","isCallable","sortingFn","m","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","applyGroupSort","detail","sortedGroupMap","nextMap","shift","dataObj","sDetial","groupS
ortingIdx","standardSortingDetails","groupSortingDetails","pop","source","_fieldStoreName","_fieldspace","joinWith","unionWith","differenceWith","projectionClone","rejectionClone","_fieldConfig","fieldObj","removeChild","sibling","parent","sd","convertToNativeDate","escape","dtParams","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","parseInt","defaultRangeParser","l","nVal","getTokenDefinitions","daysDef","short","long","monthsDef","definitions","H","extract","getHours","toString","p","P","M","mins","getMinutes","S","getSeconds","K","ms","getMilliseconds","day","getDay","A","getDate","month","getMonth","B","y","substring","presentDate","presentYear","trunc","getFullYear","Y","getTokenFormalNames","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","occurrence","forwardChar","nDate","formattedStr","formattedVal","prototype","dateTimeStamp","extractTokenValue","dtParamSeq","noBreak","dtParamArr","resolverKey","resolverParams","resolverFn","param","resolvedVal","hasOwnProperty","apply","checkIfOnlyYear","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","occurrenceLength","extractValues","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","obj1","obj2","skipUndef","tgtArr","srcArr","srcVal","tgtVal","cRef","isObject","isString","uniqueValues","round","random","arr1","arr2","ReducerStore","reducer","__unregister","defineProperties","enumerable","configurable","writable","_formattedValue","_internalValue"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,CAAC;AACD,O;ACVA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;;;AAGA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA,kDAA0C,gCAAgC;AAC1E;AACA;;AAEA;AACA;AACA;AACA,gEAAwD,kBAAkB;AAC1E;AACA,yDAAiD,cAAc;AAC/D;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,iDAAyC,iCAAiC;AAC1E,wHAAgH,mBAAmB,EAAE;AACrI;AACA;;AAEA;AACA;AACA;AACA,mCAA2B,0BAA0B,EAAE;AACvD,yCAAiC,eAAe;AAChD;AACA;AACA;;AAEA;AACA,8DAAsD,+DAA+D;;AAErH;AACA;;;AAGA;AACA;;;;;;;;;;;;;AClFA;AAAA;AAAe,SAASA,QAAT,CAAkBC,MAAlB,EAA0B;AACvC,OAAK,IAAIC,GAAT,IAAgBD,MAAhB,EAAwB;AACtB,QAAIE,QAAQF,OAAOC,GAAP,EAAYE,IAAZ,EAAZ;AAAA,QAAgCC,MAAhC;AACA,QAAI,CAACF,KAAL,EAAYA,QAAQ,IAAR,CAAZ,KACK,IAAIA,UAAU,MAAd,EAAsBA,QAAQ,IAAR,CAAtB,KACA,IAAIA,UAAU,OAAd,EAAuBA,QAAQ,KAAR,CAAvB,KACA,IAAIA,UAAU,KAAd,EAAqBA,QAAQG,GAAR,CAArB,KACA,IAAI,CAACC,MAAMF,SAAS,CAACF,KAAhB,CAAL,EAA6BA,QAAQE,MAAR,CAA7B,KACA,IAAI,8FAA8FG,IAA9F,CAAmGL,KAAnG,CAAJ,EAA+GA,QAAQ,IAAIM,IAAJ,CAASN,KAAT,CAAR,CAA/G,KACA;AACLF,WAAOC,GAAP,IAAcC,KAAd;AACD;AACD,SAAOF,MAAP;AACD,C;;;;;;;;;;;;ACbD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIS,MAAMC,oDAAGA,CAAC,GAAJ,CAAV;;AAEO,IAAIC,WAAWF,IAAIG,KAAnB;AACA,IAAIC,eAAeJ,IAAIK,SAAvB;AACA,IAAIC,YAAYN,IAAIO,MAApB;AACA,IAAIC,gBAAgBR,IAAIS,UAAxB;AACA,IAAIC,gBAAgBV,IAAIW,UAAxB,C;;;;;;;;;;;;ACRP;AAAA,IAAIC,MAAM,EAAV;AAAA,IACIC,MAAM,EADV;AAAA,IAEIC,QAAQ,EAFZ;AAAA,IAGIC,UAAU,EAHd;AAAA,IAIIC,SAAS,EAJb;;AAMA,SAASC,eAAT,CAAyBC,OAAzB,EAAkC;AAChC,SAAO,IAAIC,QAAJ,CAAa,GAAb,EAAkB,aAAaD,QAAQE,GAAR,CAAY,UAASC,IAAT,EAAeC,CAAf,EAAkB;AAClE,WAAOC,KAAKC,SAAL,CAAeH,IAAf,IAAuB,MAAvB,GAAgCC,CAAhC,GAAoC,GAA3C;AACD,GAFqC,EAEnCG,IAFmC,CAE9B,GAF8B,CAAb,GAEV,GAFR,CAAP;AAGD;;AAED,SAASC,eAAT,CAAyBR,OAAzB,EAAkCS,CAAlC,EAAqC;AACnC,MAAIpC,SAAS0B,gBAAgBC,OAAhB,CAAb;AACA,SAAO,UAASU,GAAT,EAAcN,CAAd,EAAiB;AACtB,WAAOK,EAAEpC,OAAOqC,GAAP,CAAF,EAAeN,CAAf,EAAkBJ,OAAlB,CAAP;AACD,GAFD;AAGD;;AAED;AACA,SAASW,YAAT,CAAsBC,IAAtB,EAA4B;AAC1B,MAAIC,YAAYC,OAAOC,MAAP,CAAc,IAAd,CAAhB;AAAA,MACIf,UAAU,EADd;;AAGAY,OAAKI,OAAL,CAAa,UAASN,GAAT,EAAc;AACzB,SAAK,IAAIO,MAAT,IAAmBP,GAAnB,EAAwB;A
ACtB,UAAI,EAAEO,UAAUJ,SAAZ,CAAJ,EAA4B;AAC1Bb,gBAAQkB,IAAR,CAAaL,UAAUI,MAAV,IAAoBA,MAAjC;AACD;AACF;AACF,GAND;;AAQA,SAAOjB,OAAP;AACD;;AAED,SAASmB,GAAT,CAAa5C,KAAb,EAAoB6C,KAApB,EAA2B;AACzB,MAAIC,IAAI9C,QAAQ,EAAhB;AAAA,MAAoB+C,SAASD,EAAEC,MAA/B;AACA,SAAOA,SAASF,KAAT,GAAiB,IAAIG,KAAJ,CAAUH,QAAQE,MAAR,GAAiB,CAA3B,EAA8Bf,IAA9B,CAAmC,CAAnC,IAAwCc,CAAzD,GAA6DA,CAApE;AACD;;AAED,SAASG,UAAT,CAAoBC,IAApB,EAA0B;AACxB,SAAOA,OAAO,CAAP,GAAW,MAAMN,IAAI,CAACM,IAAL,EAAW,CAAX,CAAjB,GACHA,OAAO,IAAP,GAAc,MAAMN,IAAIM,IAAJ,EAAU,CAAV,CAApB,GACAN,IAAIM,IAAJ,EAAU,CAAV,CAFJ;AAGD;;AAED,SAASC,UAAT,CAAoBC,IAApB,EAA0B;AACxB,MAAIC,QAAQD,KAAKE,WAAL,EAAZ;AAAA,MACIC,UAAUH,KAAKI,aAAL,EADd;AAAA,MAEIC,UAAUL,KAAKM,aAAL,EAFd;AAAA,MAGIC,eAAeP,KAAKQ,kBAAL,EAHnB;AAIA,SAAOxD,MAAMgD,IAAN,IAAc,cAAd,GACDH,WAAWG,KAAKS,cAAL,EAAX,EAAkC,CAAlC,IAAuC,GAAvC,GAA6CjB,IAAIQ,KAAKU,WAAL,KAAqB,CAAzB,EAA4B,CAA5B,CAA7C,GAA8E,GAA9E,GAAoFlB,IAAIQ,KAAKW,UAAL,EAAJ,EAAuB,CAAvB,CAApF,IACCJ,eAAe,MAAMf,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAtE,GAA4Eb,IAAIe,YAAJ,EAAkB,CAAlB,CAA5E,GAAmG,GAAlH,GACDF,UAAU,MAAMb,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAA9C,GAAoDX,IAAIa,OAAJ,EAAa,CAAb,CAApD,GAAsE,GAAhF,GACAF,WAAWF,KAAX,GAAmB,MAAMT,IAAIS,KAAJ,EAAW,CAAX,CAAN,GAAsB,GAAtB,GAA4BT,IAAIW,OAAJ,EAAa,CAAb,CAA5B,GAA8C,GAAjE,GACA,EAJA,CADN;AAMD;;AAEc,yEAASS,SAAT,EAAoB;AACjC,MAAIC,WAAW,IAAIC,MAAJ,CAAW,QAAQF,SAAR,GAAoB,OAA/B,CAAf;AAAA,MACIG,YAAYH,UAAUI,UAAV,CAAqB,CAArB,CADhB;;AAGA,WAAS1D,KAAT,CAAe2D,IAAf,EAAqBnC,CAArB,EAAwB;AACtB,QAAIoC,OAAJ;AAAA,QAAa7C,OAAb;AAAA,QAAsBY,OAAOzB,UAAUyD,IAAV,EAAgB,UAASlC,GAAT,EAAcN,CAAd,EAAiB;AAC5D,UAAIyC,OAAJ,EAAa,OAAOA,QAAQnC,GAAR,EAAaN,IAAI,CAAjB,CAAP;AACbJ,gBAAUU,GAAV,EAAemC,UAAUpC,IAAID,gBAAgBE,GAAhB,EAAqBD,CAArB,CAAJ,GAA8BV,gBAAgBW,GAAhB,CAAvD;AACD,KAH4B,CAA7B;AAIAE,SAAKZ,OAAL,GAAeA,WAAW,EAA1B;AACA,WAAOY,IAAP;AACD;;AAED,WAASzB,SAAT,CAAmByD,IAAnB,EAAyBnC,CAAzB,EAA4B;AAC1B,QAAIG,OAAO,EAAX;AAAA,QA
Ae;AACXkC,QAAIF,KAAKtB,MADb;AAAA,QAEIyB,IAAI,CAFR;AAAA,QAEW;AACPC,QAAI,CAHR;AAAA,QAGW;AACPC,KAJJ;AAAA,QAIO;AACHC,UAAMJ,KAAK,CALf;AAAA,QAKkB;AACdK,UAAM,KANV,CAD0B,CAOT;;AAEjB;AACA,QAAIP,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BjD,OAA/B,EAAwC,EAAEiD,CAAF;AACxC,QAAIF,KAAKD,UAAL,CAAgBG,IAAI,CAApB,MAA2BhD,MAA/B,EAAuC,EAAEgD,CAAF;;AAEvC,aAASM,KAAT,GAAiB;AACf,UAAIF,GAAJ,EAAS,OAAOvD,GAAP;AACT,UAAIwD,GAAJ,EAAS,OAAOA,MAAM,KAAN,EAAazD,GAApB;;AAET;AACA,UAAIU,CAAJ;AAAA,UAAOiD,IAAIN,CAAX;AAAA,UAAcO,CAAd;AACA,UAAIV,KAAKD,UAAL,CAAgBU,CAAhB,MAAuBzD,KAA3B,EAAkC;AAChC,eAAOmD,MAAMD,CAAN,IAAWF,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBnD,KAAlC,IAA2CgD,KAAKD,UAAL,CAAgB,EAAEI,CAAlB,MAAyBnD,KAA3E;AACA,YAAI,CAACQ,IAAI2C,CAAL,KAAWD,CAAf,EAAkBI,MAAM,IAAN,CAAlB,KACK,IAAI,CAACI,IAAIV,KAAKD,UAAL,CAAgBI,GAAhB,CAAL,MAA+BlD,OAAnC,EAA4CsD,MAAM,IAAN,CAA5C,KACA,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM;AAC/E,eAAOH,KAAKW,KAAL,CAAWF,IAAI,CAAf,EAAkBjD,IAAI,CAAtB,EAAyBoD,OAAzB,CAAiC,KAAjC,EAAwC,IAAxC,CAAP;AACD;;AAED;AACA,aAAOT,IAAID,CAAX,EAAc;AACZ,YAAI,CAACQ,IAAIV,KAAKD,UAAL,CAAgBvC,IAAI2C,GAApB,CAAL,MAAmClD,OAAvC,EAAgDsD,MAAM,IAAN,CAAhD,KACK,IAAIG,MAAMxD,MAAV,EAAkB;AAAEqD,gBAAM,IAAN,CAAY,IAAIP,KAAKD,UAAL,CAAgBI,CAAhB,MAAuBlD,OAA3B,EAAoC,EAAEkD,CAAF;AAAM,SAA1E,MACA,IAAIO,MAAMZ,SAAV,EAAqB;AAC1B,eAAOE,KAAKW,KAAL,CAAWF,CAAX,EAAcjD,CAAd,CAAP;AACD;;AAED;AACA,aAAO8C,MAAM,IAAN,EAAYN,KAAKW,KAAL,CAAWF,CAAX,EAAcP,CAAd,CAAnB;AACD;;AAED,WAAO,CAACG,IAAIG,OAAL,MAAkBzD,GAAzB,EAA8B;AAC5B,UAAIe,MAAM,EAAV;AACA,aAAOuC,MAAMvD,GAAN,IAAauD,MAAMtD,GAA1B;AAA+Be,YAAIQ,IAAJ,CAAS+B,CAAT,GAAaA,IAAIG,OAAjB;AAA/B,OACA,IAAI3C,KAAK,CAACC,MAAMD,EAAEC,GAAF,EAAOsC,GAAP,CAAP,KAAuB,IAAhC,EAAsC;AACtCpC,WAAKM,IAAL,CAAUR,GAAV;AACD;;AAED,WAAOE,IAAP;AACD;;AAED,WAAS6C,aAAT,CAAuB7C,IAAvB,EAA6BZ,OAA7B,EAAsC;AACpC,WAAOY,KAAKV,GAAL,CAAS,UAASQ,GAAT,EAAc;AAC5B,aAAOV,QAAQE,GAAR,CAAY,UAASe,MAAT,EAAiB;AAClC,eAAOyC,YAAYhD,IAAIO,MAAJ,CAAZ,CAAP;AACD,OAFM,EAEJV,IAFI,CAECgC,SAFD,CAAP;AAGD,KAJM,CAAP;AAKD;;AAED,
WAASlD,MAAT,CAAgBuB,IAAhB,EAAsBZ,OAAtB,EAA+B;AAC7B,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO,CAACZ,QAAQE,GAAR,CAAYwD,WAAZ,EAAyBnD,IAAzB,CAA8BgC,SAA9B,CAAD,EAA2CoB,MAA3C,CAAkDF,cAAc7C,IAAd,EAAoBZ,OAApB,CAAlD,EAAgFO,IAAhF,CAAqF,IAArF,CAAP;AACD;;AAED,WAAShB,UAAT,CAAoBqB,IAApB,EAA0BZ,OAA1B,EAAmC;AACjC,QAAIA,WAAW,IAAf,EAAqBA,UAAUW,aAAaC,IAAb,CAAV;AACrB,WAAO6C,cAAc7C,IAAd,EAAoBZ,OAApB,EAA6BO,IAA7B,CAAkC,IAAlC,CAAP;AACD;;AAED,WAASd,UAAT,CAAoBmB,IAApB,EAA0B;AACxB,WAAOA,KAAKV,GAAL,CAAS0D,SAAT,EAAoBrD,IAApB,CAAyB,IAAzB,CAAP;AACD;;AAED,WAASqD,SAAT,CAAmBlD,GAAnB,EAAwB;AACtB,WAAOA,IAAIR,GAAJ,CAAQwD,WAAR,EAAqBnD,IAArB,CAA0BgC,SAA1B,CAAP;AACD;;AAED,WAASmB,WAAT,CAAqBnF,KAArB,EAA4B;AAC1B,WAAOA,SAAS,IAAT,GAAgB,EAAhB,GACDA,iBAAiBM,IAAjB,GAAwB6C,WAAWnD,KAAX,CAAxB,GACAiE,SAAS5D,IAAT,CAAcL,SAAS,EAAvB,IAA6B,OAAOA,MAAMiF,OAAN,CAAc,IAAd,EAAoB,MAApB,CAAP,GAAqC,IAAlE,GACAjF,KAHN;AAID;;AAED,SAAO;AACLU,WAAOA,KADF;AAELE,eAAWA,SAFN;AAGLE,YAAQA,MAHH;AAILE,gBAAYA,UAJP;AAKLE,gBAAYA;AALP,GAAP;AAOD,C;;;;;;;;;;;;ACjKD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;ACFA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAEA,IAAIoE,MAAM9E,oDAAGA,CAAC,IAAJ,CAAV;;AAEO,IAAI+E,WAAWD,IAAI5E,KAAnB;AACA,IAAI8E,eAAeF,IAAI1E,SAAvB;AACA,IAAI6E,YAAYH,IAAIxE,MAApB;AACA,IAAI4E,gBAAgBJ,IAAItE,UAAxB;AACA,IAAI2E,gBAAgBL,IAAIpE,UAAxB,C;;;;;;;;;;;;;;;;;;;;;;;ACRP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;AAGO,IAAM0E,cAAc,aAApB;;AAEP;;;AAGO,IAAMC,SAAS,QAAf;;AAEP;;;AAGO,IAAMC,iBAAiB;AAC1BC,YAAQ,QADkB;AAE1BC,aAAS,SAFiB;AAG1BC,aAAS,OAHiB;AAI1BC,aAAS,SAJiB;AAK1BC,aAAS,oBALiB;AAM1BC,SAAK,KANqB;AAO1BC,UAAM;AAPoB,CAAvB;;AAUA,IAAMC,QAAQ;AACjBC,WAAO,OADU;AAEjBC,eAAW,WAFM;AAGjBC,gBAAY,YAHK;AAIjBC,aAAS,SAJQ;AAKjBC,eAAW;AALM,CAAd;;AAQA,IAAMC,oBAAoB;AAC7BC,SAAK,KADwB;AAE7BC,QAAI;AAFyB,CAA1B,C;;;;;;;;;;;;;;;;;;;AChCP;AACA;;IAEMC,kB;AACF,kCAAc;AAAA;;AACV,aAAKC,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKC,UAA
L,CAAgB,KAAKC,qBAAL,EAAhB;AACH;;;;gDAEuB;AACpB,mBAAO,CACH,IAAIC,qEAAJ,EADG,EAEH,IAAIC,oEAAJ,EAFG,EAGH,IAAIC,gEAAJ,EAHG,EAIH,IAAIC,oEAAJ,EAJG,CAAP;AAMH;;AAED;;;;;;;;qCAK4B;AAAA;;AAAA,gBAAjBL,WAAiB,uEAAJ,EAAI;;AACxBA,wBAAWzE,OAAX,CAAmB;AAAA,uBAAa,MAAKuE,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B,CAAb;AAAA,aAAnB;AACA,mBAAO,KAAKT,KAAZ;AACH;;AAED;;;;;;;;iCAKSS,S,EAAW;AAChB,gBAAIA,qBAAqBE,4DAAzB,EAAwC;AACpC,qBAAKX,KAAL,CAAWQ,GAAX,CAAeC,UAAUC,IAAzB,EAA+BD,SAA/B;AACA,uBAAO,IAAP;AACH;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;mCAMWA,S,EAAW;AAClB,iBAAKT,KAAL,CAAWY,MAAX,CAAkBH,UAAUC,IAA5B;AACA,mBAAO,IAAP;AACH;;;4BAEG9F,I,EAAM;AACN,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,uBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;AACD,mBAAO,IAAP;AACH;;;;;;AAIL,IAAMmG,iBAAkB,YAAY;AAChC,QAAIf,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAID,kBAAJ,EAAR;AACA,eAAOC,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARuB,EAAxB;;AAUeD,6EAAf,E;;;;;;;;;;;;;;;;;;;;;;;;ACvEA;AACA;AACA;;IAEqBR,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJU,0DAAUA,CAACC,IADP;AAEb;;;;gCAEOC,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOH,oEAAIA,CAACC,IAAL,EAAWC,MAAX,EAAmBC,OAAnB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BJ,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBF,iB;;;AACjB,iCAAc;AAAA;;AAAA,qIACJY,0DAAUA,CAACK,OADP;AAEb;;;;gCAEOH,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOE,8DAAMA,CAACJ,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP0CV,4D;;AAA1BN,gF;;;;;;;;;;;;;;;;;;;;;;;;ACJrB;AACA;AACA;;IAEqBD,kB;;;AACjB,kCAAc;AAAA;;AAAA,uIACJa,0DAAUA,CAACO,OADP;AAEb;;;;gCAEOL,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOI,8DAAMA,CAACN,IAAP,EAAaC,MAAb,EAAqBC,OAArB,CAAP;AACH;;;;EAP2CV,4D;;AAA3BP,iF;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACFA;AACA;AACA;;IAEqBE,a;;;AACjB,6BAAc;AAAA;;AAAA,6HACJW,0DAAUA,CAACS,SADP;AAEb;;;;gCAEOP,I,EAAMC,M,EAAQC,O,EAAS;AAC3B,mBAAOM,gEAAQA,CAACR,IAAT,EAAeC,MAAf,EAAuBC,OAAvB,CAAP;AACH;;;;EAPsCV,4D;;AAAtBL,4E;;;;;;;;;;;;ACJrB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;;;;;;;;
;;;;;;;;;;ACDA;;;IAGqBK,a;AACjB,2BAAYD,IAAZ,EAAkB;AAAA;;AACd,aAAKkB,KAAL,GAAalB,IAAb;AACH;;;;kCAMS;AACN,kBAAM,IAAImB,KAAJ,CAAU,iCAAV,CAAN;AACH;;;4BANU;AACP,mBAAO,KAAKD,KAAZ;AACH;;;;;;AAPgBjB,4E;;;;;;;;;;;;ACHrB;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAASmB,IAAT,CAAeX,IAAf,EAAqBC,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAMnB,aAAa,EAAEyB,4DAAF,EAAYF,wDAAZ,EAAoBF,wDAApB,EAAnB;AACA,QAAMQ,aAAaC,+DAAgBA,CAACb,IAAjB,CAAnB;;AAEA,QAAI,CAACY,UAAL,EAAiB;AACb,cAAM,IAAIF,KAAJ,CAAU,kCAAV,CAAN;AACH;;AAED,WAAO3B,WAAW6B,UAAX,EAAuBZ,IAAvB,EAA6BC,MAA7B,EAAqCC,OAArC,CAAP;AACH;;AAEcS,mEAAf,E;;;;;;;;;;;;;;;;ACvBA;;AAEA;;;;;;;;;;;;;;;;;AAiBA,SAASP,MAAT,CAAgBU,GAAhB,EAAqBb,MAArB,EAA6BC,OAA7B,EAAsC;AAClC,QAAI,CAACrF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;AACD,QAAMM,gBAAgB;AAClBC,wBAAgB;AADE,KAAtB;AAGA,QAAMC,eAAejB,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAArB;AACAyG,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM5G,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;;AAEA,QAAIgI,UAAUJ,YAAd;AACA,QAAIhB,QAAQe,cAAZ,EAA4B;AACxB;AACA;AACAK,kBAAUR,IAAIS,MAAJ,CAAW,CAAX,EAAc,CAAd,EAAiB,CAAjB,CAAV;AACH;AACD;AACA,QAAMC,YAAYF,QAAQG,MAAR,CAAe,UAACC,GAAD,EAAMC,CAAN,EAASjI,CAAT;AAAA,eAC7BU,OAAOgH,MAAP,CAAcM,GAAd,sBAAsBC,CAAtB,EAA0BjI,CAA1B,EAD6B;AAAA,KAAf,EAEf,EAFe,CAAlB;;AAIAoH,QAAIxG,OAAJ,CAAY,UAACsH,MAAD,EAAY;AACpB,YAAMC,QAAQ,EAAd;AACAX,qBAAa5G,OAAb,CAAqB,UAACwH,WAAD,EAAiB;AAClC,gBAAMC,YAAYP,UAAUM,WAAV,CAAlB;AACAD,kBAAMrH,IAAN,CAAWoH,OAAOG,SAAP,CAAX;AACH,SAHD;AAIA,eAAOvH,sBAAQqH,KAAR,CAAP;AACH,KAPD;AAQA,WAAO,CAACX,YAAD,EAAe5H,OAAf,CAAP;AACH;;AAEc8G,qEAAf,E;;;;;;;;;;;;ACtDA;AAAA;AAAA;AAAA;AACA;;AAEA;;;;;;;;;;;;;;;;;;;;;AAqBA,SAASE,MAAT,CAAiB0B,GAAjB,EAAsB/B,MAAtB,EAA8BC,OAA9B,EAAuC;AACnC,QAAMc,gBAAgB;AAClBC,wBAAgB,IADE;AAElBgB,wBAAgB;AAFE,KAAtB;AAIA/B,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBJ,aAAlB,EAAiCd,OAAjC,CAAV;;AAEA,QAAM7H,MAAM6J,wDAAKA,CAAChC,QAAQ+B,cAAd,CAAZ;AACA,WAAO7B,wDAAMA,CAAC/H,IAAII,SAAJ,CAAcuJ,GAAd,CAAP,EAA2B
/B,MAA3B,EAAmCC,OAAnC,CAAP;AACH;;AAEcI,qEAAf,E;;;;;;;;;;;;ACnCA;AAAA;AAAA;;AAEA;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BA,SAASE,QAAT,CAAmBM,GAAnB,EAAwBb,MAAxB,EAAgC;AAC5B,QAAI,CAACpF,MAAMkG,OAAN,CAAcd,MAAd,CAAL,EAA4B;AACxB,cAAM,IAAIS,KAAJ,CAAU,+CAAV,CAAN;AACH;;AAED,QAAMyB,SAAS,EAAf;AACA,QAAIzI,IAAI,CAAR;AACA,QAAI0I,uBAAJ;AACA,QAAM9I,UAAU,EAAhB;AACA,QAAMkB,OAAO6G,0DAAWA,CAAC/H,OAAZ,CAAb;AACA,QAAM+I,mBAAmBpC,OAAOzG,GAAP,CAAW;AAAA,eAAc2H,WAAW1H,IAAzB;AAAA,KAAX,CAAzB;;AAEAqH,QAAIxG,OAAJ,CAAY,UAACgI,IAAD,EAAU;AAClB,YAAMV,SAAS,EAAf;AACAS,yBAAiB/H,OAAjB,CAAyB,UAAC6G,UAAD,EAAgB;AACrC,gBAAIA,cAAcgB,MAAlB,EAA0B;AACtBC,iCAAiBD,OAAOhB,UAAP,CAAjB;AACH,aAFD,MAEO;AACHgB,uBAAOhB,UAAP,IAAqBzH,GAArB;AACA0I,iCAAiB1I,IAAI,CAArB;AACH;AACDkI,mBAAOQ,cAAP,IAAyBE,KAAKnB,UAAL,CAAzB;AACH,SARD;AASA3G,8BAAQoH,MAAR;AACH,KAZD;;AAcA,WAAO,CAACxH,OAAOmI,IAAP,CAAYJ,MAAZ,CAAD,EAAsB7I,OAAtB,CAAP;AACH;;AAEckH,uEAAf,E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACzDA;;AAEA;AACA;AAYA;AACA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;;;;;;IAYMgC,S;;;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoCA,yBAAsB;AAAA;;AAAA;;AAAA,0CAANC,IAAM;AAANA,gBAAM;AAAA;;AAAA,qJACTA,IADS;;AAGlB,cAAKC,cAAL,GAAsB,EAAtB;AAHkB;AAIrB;;AAED;;;;;;;;;;;;;;;AA0CA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAgCSxC,O,EAAS;AACd,gBAAMyC,aAAa;AACfC,uBAAO,KADQ;AAEfC,2BAAW,IAFI;AAGfC,yBAAS,KAHM;AAIfC,8BAAc,KAJC;AAKfC,sBAAM;AALS,aAAnB;AAOA9C,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;AACA,gBAAM0B,SAAS,KAAKqB,oBAAL,GAA4BrB,MAA3C;;AAEA,gBAAMsB,gBAAgBC,qDAAWA,CAACC,IAAZ,CAClB,IADkB,EAElB,KAAKH,oBAAL,GAA4BrB,MAFV,EAGlB,KAAKyB,WAHa,EAIlBnD,QAAQ6C,YAAR,GAAuBnB,OAAOpI,GAAP,CAAW;AAAA,uBAAK8J,EAAE7J,IAAF,EAAL;AAAA,aAAX,EAA0BI,IAA1B,EAAvB,GAA0D,KAAK0J,cAJ7C,EAKlBrD,QAAQ8C,IALU,EAMlB;AACIQ,4BAAYtD,QAAQ0C,KAAR,KAAkB,QADlC;AAEIa,wBAAQ,CAAC,CAACvD,QAAQ4C;AAFtB,aANkB,CAAtB;;AAYA,gBAAI,CAAC5C,QAAQ2C,SAAb,EAAwB;AACpB,uBAAOK,aAAP;AACH;;AAzBa,2BA2BQhD,OA3BR;AAAA,gBA2BN2C,SA3BM,YA2BNA,SA3BM;AAAA,gBA4BN7C,IA5BM,GA4BiBkD,aA5BjB,CA4BNlD,IA5BM;AAAA,gBA4BAC,MA5BA,GA4BiBiD,
aA5BjB,CA4BAjD,MA5BA;AAAA,gBA4BQyD,IA5BR,GA4BiBR,aA5BjB,CA4BQQ,IA5BR;;AA6Bd,gBAAMC,aAAa1D,OAAOzG,GAAP,CAAY;AAAA,uBAAKoK,EAAEnK,IAAP;AAAA,aAAZ,CAAnB;AACA,gBAAMoK,gBAAgBzJ,OAAOmI,IAAP,CAAYM,SAAZ,CAAtB;AACA,gBAAMiB,cAAcD,cAAcpC,MAAd,CAAqB,UAACC,GAAD,EAAMqC,IAAN,EAAe;AACpD,oBAAMC,MAAML,WAAWM,OAAX,CAAmBF,IAAnB,CAAZ;AACA,oBAAIC,QAAQ,CAAC,CAAb,EAAgB;AACZtC,wBAAIlH,IAAJ,CAAS,CAACwJ,GAAD,EAAMnB,UAAUkB,IAAV,CAAN,CAAT;AACH;AACD,uBAAOrC,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;;AAQA,gBAAIxB,QAAQ0C,KAAR,KAAkB,QAAtB,EAAgC;AAC5BkB,4BAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,wBAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,wBAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAlE,yBAAKmE,IAAL,EAAW7J,OAAX,CAAmB,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AACpCtE,6BAAKmE,IAAL,EAAWG,QAAX,IAAuBF,MAAMhB,IAAN,CACnBmB,SADmB,EAEnBF,KAFmB,EAGnBX,KAAKY,QAAL,CAHmB,EAInBrE,OAAOkE,IAAP,CAJmB,CAAvB;AAMH,qBAPD;AAQH,iBAZD;AAaH,aAdD,MAcO;AACHnE,qBAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAQC,QAAR,EAAqB;AAC9BR,gCAAYxJ,OAAZ,CAAoB,UAAC4J,IAAD,EAAU;AAC1B,4BAAMC,OAAOD,KAAK,CAAL,CAAb;AACA,4BAAME,QAAQF,KAAK,CAAL,CAAd;;AAEAG,8BAAMF,IAAN,IAAcC,MAAMhB,IAAN,CACVmB,SADU,EAEVF,MAAMF,IAAN,CAFU,EAGVT,KAAKY,QAAL,CAHU,EAIVrE,OAAOkE,IAAP,CAJU,CAAd;AAMH,qBAVD;AAWH,iBAZD;AAaH;;AAED,mBAAOjB,aAAP;AACH;;AAED;;;;;;;;kCAKW;AACP,gBAAMsB,aAAa,KAAKnB,WAAxB;AACA,gBAAMoB,MAAM,EAAZ;;AAEA,gBAAID,WAAW5J,MAAf,EAAuB;AACnB,oBAAM8J,WAAWF,WAAWG,KAAX,CAAiB,GAAjB,CAAjB;;AAEAD,yBAASpK,OAAT,CAAiB,UAAC+E,GAAD,EAAS;AAAA,yCACHA,IAAIsF,KAAJ,CAAU,GAAV,EAAenL,GAAf,CAAmBoL,MAAnB,CADG;AAAA;AAAA,wBACjBC,KADiB;AAAA,wBACVC,GADU;;AAGtBA,0BAAMA,QAAQP,SAAR,GAAoBO,GAApB,GAA0BD,KAAhC;AACAJ,wBAAIjK,IAAJ,+BAAYK,MAAMiK,MAAMD,KAAN,GAAc,CAApB,EAAuBE,IAAvB,GAA8BvL,GAA9B,CAAkC,UAACwL,CAAD,EAAIhB,GAAJ;AAAA,+BAAYa,QAAQb,GAApB;AAAA,qBAAlC,CAAZ;AACH,iBALD;AAMH;;AAED,mBAAOS,GAAP;AACH;AACD;;;;;;;;;;;;;;;;;;;;;;;;;;;gCAwBSQ,S,EAAwD;AAAA,gBAA7CC,QAA6C,uEAAlC,EAAkC;AAAA,gBAA9BC,MAA8B,uEAArB,EAAEC,WAAW,IAAb,EAAqB;;AAC7D,gBAAMC,qBAAmBJ,UAAUpL,IAAV,EAAzB;AACA,gBAAIyL,SAAS,CAAC,IAAD,EAAOL,SAAP,EAAkBC,QAAlB,CAAb;AACA,gBAAMK,eAAeC,mEAAWF,MAAX,CAArB;
;AAEAG,8EAAkBA,CACd,IADJ,EAEIF,YAFJ,EAGI5H,yDAAcA,CAACG,OAHnB,EAII,EAAEmH,oBAAF,EAAaI,4BAAb,EAA4BK,gBAAgBC,4DAAYA,CAACD,cAAb,EAA5C,EAJJ,EAKIR,QALJ;;AAQA,gBAAIC,OAAOC,SAAX,EAAsB;AAClBG,6BAAaK,SAAb,CAAuB,IAAvB;AACH,aAFD,MAEO;AACHL,6BAAaK,SAAb,CAAuB,IAAvB;AACH;;AAED,mBAAOL,YAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BAmDMM,c,EAA+C;AAAA,gBAA/BV,MAA+B,uEAAtB,EAAEC,WAAW,KAAb,EAAsB;;AACjD,gBAAMU,UAAU,KAAKC,OAAL,CAAa;AACzBnD,uBAAO,KADkB;AAEzBI,sBAAM6C;AAFmB,aAAb,CAAhB;AAIA,gBAAM1D,SAAS2D,QAAQ7F,MAAR,CAAezG,GAAf,CAAmB;AAAA,uBAASqI,MAAMpI,IAAf;AAAA,aAAnB,CAAf;AACA,gBAAMuM,eAAe,CAAC7D,MAAD,EAASlF,MAAT,CAAgB6I,QAAQ9F,IAAxB,CAArB;;AAEA,gBAAMiG,WAAW,IAAI,KAAKC,WAAT,CAAqBF,YAArB,EAAmCF,QAAQ7F,MAA3C,EAAmD,EAAEW,YAAY,QAAd,EAAnD,CAAjB;;AAEA6E,8EAAkBA,CACd,IADJ,EAEIQ,QAFJ,EAGItI,yDAAcA,CAACO,IAHnB,EAIIiH,MAJJ,EAKIU,cALJ;;AAQA,gBAAIV,OAAOC,SAAX,EAAsB;AAClBa,yBAASL,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHK,yBAASL,SAAT,CAAmB,IAAnB;AACH;;AAED,mBAAOK,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;kCAqBW1G,I,EAAMW,O,EAAS;AACtBX,mBAAOA,QAAQ,KAAK4G,WAApB;AACAjG,sBAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEa,gBAAgB,GAAlB,EAAlB,EAA2C/B,OAA3C,CAAV;;AAEA,gBAAM0B,SAAS,KAAKwE,aAAL,GAAqBxE,MAApC;AACA,gBAAMyE,UAAUzE,OAAOpI,GAAP,CAAW;AAAA,uBAAKO,EAAEuM,aAAF,EAAL;AAAA,aAAX,CAAhB;AACA,gBAAMC,YAAYF,QAAQ,CAAR,EAAWzL,MAA7B;AACA,gBAAI4L,uBAAJ;AACA,gBAAIC,eAAJ;AACA,gBAAIC,eAAJ;;AAEA,gBAAInH,SAASO,iDAAUA,CAACS,SAAxB,EAAmC;AAC/BiG,iCAAiB,EAAjB;AACA,qBAAKC,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,MAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,4BAAI4H,OAAO8E,MAAP,EAAejN,IAAf,EAAJ,IAA6B4M,QAAQK,MAAR,EAAgBD,MAAhB,CAA7B;AACH;AACDD,mCAAehM,IAAf,CAAoBR,GAApB;AACH;AACJ,aATD,MASO,IAAIuF,SAASO,iDAAUA,CAACO,OAAxB,EAAiC;AACpCmG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,EAA0BI,IAA1B,CAA+BqG,QAAQ+B,cAAvC,CAAD,CAAjB;AACA,qBAAKwE,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,OAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiB
A,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,6BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAAIH,IAAJ,CAASqG,QAAQ+B,cAAjB,CAApB;AACH;AACDuE,iCAAiBA,eAAe3M,IAAf,CAAoB,IAApB,CAAjB;AACH,aAVM,MAUA,IAAI0F,SAASO,iDAAUA,CAACK,OAAxB,EAAiC;AACpCqG,iCAAiB,CAAC5E,OAAOpI,GAAP,CAAW;AAAA,2BAAKO,EAAEN,IAAF,EAAL;AAAA,iBAAX,CAAD,CAAjB;AACA,qBAAKgN,SAAS,CAAd,EAAiBA,SAASF,SAA1B,EAAqCE,QAArC,EAA+C;AAC3C,wBAAMzM,QAAM,EAAZ;AACA,yBAAK0M,SAAS,CAAd,EAAiBA,SAAS9E,OAAOhH,MAAjC,EAAyC8L,QAAzC,EAAmD;AAC/C1M,8BAAIQ,IAAJ,CAAS6L,QAAQK,MAAR,EAAgBD,MAAhB,CAAT;AACH;AACDD,mCAAehM,IAAf,CAAoBR,KAApB;AACH;AACJ,aATM,MASA;AACH,sBAAM,IAAI0G,KAAJ,gBAAuBnB,IAAvB,uBAAN;AACH;;AAED,mBAAOiH,cAAP;AACH;;;iCAES3E,K,EAAO;AACb,gBAAM8E,YAAY9E,MAAMpI,IAAN,EAAlB;AACA,iBAAK8J,cAAL,UAA2BoD,SAA3B;AACA,gBAAMC,oBAAoB,KAAKC,kBAA/B;AACA,gBAAMC,qBAAqBF,kBAAkBG,mBAA7C;AACA,gBAAMT,gBAAgBzE,MAAMyE,aAAN,EAAtB;AACA,gBAAMR,UAAUjE,MAAMmF,YAAN,CAAmBhH,IAAnC;;AAEA,gBAAI,CAAC4G,kBAAkBK,SAAlB,GAA8BpF,MAAMpI,IAAN,EAA9B,CAAL,EAAkD;AAC9CmN,kCAAkBhF,MAAlB,CAAyBpH,IAAzB,CAA8BqH,KAA9B;AACAiF,mCAAmBxM,OAAnB,CAA2B,UAAC4M,GAAD,EAAMxN,CAAN,EAAY;AACnCwN,wBAAIrF,MAAMpI,IAAN,EAAJ,IAAoB,IAAI0N,8CAAJ,CAAUb,cAAc5M,CAAd,CAAV,EAA4BoM,QAAQpM,CAAR,CAA5B,EAAwCmI,KAAxC,CAApB;AACH,iBAFD;AAGH,aALD,MAKO;AACH,oBAAMuF,aAAaR,kBAAkBhF,MAAlB,CAAyByF,SAAzB,CAAmC;AAAA,2BAAaC,UAAU7N,IAAV,OAAqBkN,SAAlC;AAAA,iBAAnC,CAAnB;AACAS,8BAAc,CAAd,KAAoBR,kBAAkBhF,MAAlB,CAAyBwF,UAAzB,IAAuCvF,KAA3D;AACH;;AAED;AACA+E,8BAAkBW,gBAAlB,GAAqC,IAArC;AACAX,8BAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,8BAAkBa,cAAlB,GAAmC,IAAnC;;AAEA,iBAAKC,qBAAL,GAA6BC,qBAA7B;AACA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0CAoCmB1H,M,EAAQ2H,U,EAAYzC,M,EAAQ;AAAA;;AAC3ClF,qBAAS4H,kEAAkBA,CAAC5H,MAAnB,CAAT;AACAkF,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkB,EAAEgE,WAAW,IAAb,EAAmB0C,YAAY,KAA/B,EAAlB,EAA0D3C,MAA1D,CAAT;;AAEA,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;AACA,gBAAMC,UAAUL,WAAW/K,KAAX,CAAiB,CAAjB,EAAoB+K,WAAWhN,MAAX,GAAoB,CAAxC,CAAhB;AACA,gBAAMsN,aAAaN,WAAWA,WAAWhN,MAAX,GAAoB,CAA/B,C
AAnB;;AAEA,gBAAImN,aAAa9H,OAAOxG,IAApB,KAA6B,CAAC0L,OAAO2C,UAAzC,EAAqD;AACjD,sBAAM,IAAIpH,KAAJ,CAAaT,OAAOxG,IAApB,wCAAN;AACH;;AAED,gBAAM0O,kBAAkBF,QAAQzO,GAAR,CAAY,UAACqI,KAAD,EAAW;AAC3C,oBAAMuG,YAAYL,aAAalG,KAAb,CAAlB;AACA,oBAAI,CAACuG,SAAL,EAAgB;AACZ;AACA,0BAAM,IAAI1H,KAAJ,CAAamB,KAAb,kCAAN;AACH;AACD,uBAAOuG,UAAUC,KAAjB;AACH,aAPuB,CAAxB;;AASA,gBAAMC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;;AAEA,gBAAMmD,KAAKD,MAAMlC,aAAN,GAAsBxE,MAAjC;AACA,gBAAM4G,iBAAiBL,gBAAgB3O,GAAhB,CAAoB;AAAA,uBAAO+O,GAAGvE,GAAH,CAAP;AAAA,aAApB,CAAvB;;AAEA,gBAAIyE,cAAc,EAAlB;AACA,gBAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,uBAAM,OAAKC,YAAL,EAAN;AAAA,aAApB;;AAEA,gBAAMC,iBAAiB,EAAvB;AACAC,gFAAkBA,CAACP,MAAMjF,WAAzB,EAAsC,UAAC3J,CAAD,EAAO;AACzC,oBAAMoP,aAAaN,eAAehP,GAAf,CAAmB;AAAA,2BAASqI,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAT;AAAA,iBAAnB,CAAnB;AACAkP,+BAAelP,CAAf,IAAoBwO,+CAAcY,UAAd,UAA0BpP,CAA1B,EAA6BgP,aAA7B,EAA4CD,WAA5C,GAApB;AACH,aAHD;;AA9B2C,gCAkC3BM,mEAAYA,CAAC,CAACH,cAAD,CAAb,EAA+B,CAAC3I,MAAD,CAA/B,EAAyC,CAACA,OAAOxG,IAAR,CAAzC,CAlC2B;AAAA;AAAA,gBAkCpCoI,KAlCoC;;AAmC3CyG,kBAAMU,QAAN,CAAenH,KAAf;;AAEA4D,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACK,OAHnB,EAII,EAAEmH,QAAQlF,MAAV,EAAkB2B,QAAQqG,OAA1B,EAJJ,EAKIC,UALJ;;AAQA,mBAAOI,KAAP;AACH;;AAED;;;;;;;;;;;kCAQWW,W,EAA2D;AAAA,gBAA9C9D,MAA8C,uEAArC,EAAqC;AAAA,gBAAjC+D,cAAiC;AAAA,gBAAjBC,UAAiB,uEAAJ,EAAI;;AAClE,gBAAMC,kBAAkBjE,OAAOiE,eAA/B;AACA,gBAAMC,sBAAsBlE,OAAOmE,QAAnC;AACA,gBAAMC,UAAUpE,OAAOoE,OAAvB;AACA,gBAAMC,YAAYC,gEAAgBA,CAAC,IAAjB,CAAlB;AACA,gBAAMC,uBAAuBF,UAAUG,qBAAvC;AACA,gBAAMC,mBAAmBC,mEAAmBA,CAAC,IAApB,CAAzB;AACA,gBAAMC,aAAa;AACfC,8BAAcH,gBADC;AAEfI,uBAAOR;AAFQ,aAAnB;;AAKAN,8BAAkBe,kEAAkBA,CAACP,oBAAnB,EAAyCvE,MAAzC,EAAiD,IAAjD,CAAlB;AACA+E,oFAAwBA,CAACjB,WAAzB,EAAsCa,UAAtC,EAAkD,EAAEJ,0CAAF;AAC9CJ,0BAAUD,mBADoC;AAE9Cc,mCAAmB,IAF2B,EAAlD,EAGI/P,OAAOgH,MAAP,CAAc;AACVmI;AADU,aAAd,EAEGpE,MAFH,CAHJ;;AAOA,gBAAIiE,eAAJ,EAAqB;AACjBgB,yFAAyBA,CAACV,oBAA1B,EAAgDF,SAAhD,EAA2D;AACvDrE,kCADuD;AAEvDgE;AAFuD,iBAA3D,EAGG,IAHH;AAIH;;AAED,mBAAO,IAA
P;AACH;;AAED;;;;;;;;;;2BAOIkB,S,EAAWC,Q,EAAU;AACrB,oBAAQD,SAAR;AACA,qBAAK5M,sDAAL;AACI,yBAAKiF,cAAL,CAAoBlI,IAApB,CAAyB8P,QAAzB;AACA;AAHJ;AAKA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;oCAMaD,S,EAAW;AACpB,oBAAQA,SAAR;AACA,qBAAK5M,sDAAL;AACI,yBAAKiF,cAAL,GAAsB,EAAtB;AACA;;AAHJ;AAMA,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;0CAOmB6H,S,EAAWhB,O,EAAS;AAAA;;AACnC,gBAAIiB,gBAAgB,KAAK9H,cAAzB;AACA8H,0BAAclQ,OAAd,CAAsB;AAAA,uBAAMmQ,GAAGrH,IAAH,CAAQ,MAAR,EAAcmH,SAAd,EAAyBhB,OAAzB,CAAN;AAAA,aAAtB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;4BA2CKmB,gB,EAAkBvF,M,EAAQ;AAC3B,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEA,gBAAI,CAACD,aAAa2C,gBAAb,CAAL,EAAqC;AACjC,sBAAM,IAAIhK,KAAJ,YAAmBgK,gBAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAexF,OAAO1L,IAAP,IAAkBiR,gBAAlB,YAArB;;AAEA,gBAAI3C,aAAa4C,YAAb,CAAJ,EAAgC;AAC5B,sBAAM,IAAIjK,KAAJ,YAAmBiK,YAAnB,qBAAN;AACH;;AAED,gBAAMC,eAAe,KAAKxE,aAAL,GAAqBa,SAArB,GAAiCyD,gBAAjC,CAArB;;AAb2B,wCAcEG,sFAAqBA,CAACD,YAAtB,EAAoC,KAAKvH,WAAzC,EAAsD8B,MAAtD,CAdF;AAAA,gBAcnB2F,UAdmB,yBAcnBA,UAdmB;AAAA,gBAcPC,IAdO,yBAcPA,IAdO;;AAgB3B,gBAAMC,WAAWjC,mEAAYA,CAAC,CAAC+B,UAAD,CAAb,EAA2B,CACxC;AACIrR,sBAAMkR,YADV;AAEIpL,sBAAM0L,gDAASA,CAACC,SAFpB;AAGIC,yBAASC,uDAAgBA,CAACC,MAH9B;AAIIN;AAJJ,aADwC,CAA3B,EAMT,CAACJ,YAAD,CANS,EAMO,CANP,CAAjB;;AAQA,gBAAMrC,QAAQ,KAAKA,KAAL,CAAWnD,OAAOC,SAAlB,CAAd;AACAkD,kBAAMU,QAAN,CAAegC,QAAf;;AAEAvF,8EAAkBA,CACd,IADJ,EAEI6C,KAFJ,EAGI3K,yDAAcA,CAACM,GAHnB,EAIK,EAAEyM,kCAAF,EAAoBvF,cAApB,EAA4BwF,0BAA5B,EAJL,EAKK,IALL;;AAQA,mBAAOrC,KAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;uCAuBgB;AACZ,gBAAMtI,OAAO,KAAKsL,SAAL,CAAexL,iDAAUA,CAACS,SAA1B,CAAb;AACA,gBAAMN,SAAS,KAAKsL,SAAL,EAAf;;AAEA,mBAAO,IAAI/I,SAAJ,CAAcxC,IAAd,EAAoBC,MAApB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;mCA0CYuL,Y,EAAcC,S,EAAWtG,M,EAAQ;AACzC,gBAAM4C,eAAe,KAAKC,eAAL,EAArB;;AAEAwD,yBAAalR,OAAb,CAAqB,UAACqM,SAAD,EAAe;AAChC,oBAAI,CAACoB,aAAapB,SAAb,CAAL,EAA8B;AAC1B,0BAAM,IAAIjG,KAAJ,YAAmBiG,SAAnB,mCAAN;AACH;AACJ,aAJD;;AAMA,gBAAM+E,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdzG,2BAAW;AAFG,aAAlB;;AAKAD
,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBsK,SAAlB,EAA6BvG,MAA7B,CAAT;;AAEA,mBAAO2G,+DAAeA,CAAC,IAAhB,EAAsBN,YAAtB,EAAoCC,SAApC,EAA+CtG,MAA/C,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;wCAoC6D;AAAA,gBAA9C4G,YAA8C,uEAA/B,EAA+B;AAAA,gBAA3BC,YAA2B,uEAAZ,EAAY;AAAA,gBAAR7G,MAAQ;;AACzD,gBAAMuG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdzG,2BAAW;AAFG,aAAlB;AAIA,gBAAM6G,cAAc,KAAKjE,eAAL,EAApB;AACA,gBAAMkE,YAAY9R,OAAOmI,IAAP,CAAY0J,WAAZ,CAAlB;AACA,gBAAME,0BAA0B,CAAC,CAACH,YAAD,CAAD,CAAhC;;AAEA7G,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBsK,SAAlB,EAA6BvG,MAA7B,CAAT;AACA4G,2BAAeA,aAAanR,MAAb,GAAsBmR,YAAtB,GAAqC,CAAC,EAAD,CAApD;;AAGAA,yBAAazR,OAAb,CAAqB,UAAC8R,QAAD,EAAW1S,CAAX,EAAiB;AAClCyS,wCAAwBzS,CAAxB,IAA6B2S,sEAAsBA,8BAC3CD,QADqB,sBACRJ,YADQ,IAEzBE,SAFyB,EAGzBD,WAHyB,CAA7B;AAIH,aALD;;AAOA,mBAAOK,gEAAgBA,CAAC,IAAjB,EAAuBH,uBAAvB,EAAgDhH,MAAhD,EAAwD+G,SAAxD,CAAP;AACH;;;;;AAlvBD;;;;;;;;;;;mDAWmC/G,M,EAAQ;AACvC,mBAAOoH,4DAAiBA,CAACC,gBAAlB,CAAmCrH,MAAnC,CAAP;AACH;;;4BA/BsB;AACnB,mBAAOQ,4DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO/F,0DAAP;AACH;;AAED;;;;;;4BAGwB;AACpB,mBAAO6M,sDAAP;AACH;;;;EApEmBC,iD;;AA6zBTlK,wEAAf,E;;;;;;;;;;;;ACv2BA;AAAA;AAAA;;AAEe;AACX5B,gBAAYd,iDAAUA,CAACC;AADZ,CAAf,E;;;;;;;;;;;;ACFA;AAAA;;;;;;;AAOA,IAAMD,aAAa;AACfS,aAAW,UADI;AAEfF,WAAS,QAFM;AAGfF,WAAS,QAHM;AAIfJ,QAAM;AAJS,CAAnB;;AAOeD,yEAAf,E;;;;;;;;;;;;ACdA;AAAA;;;;;;AAMA,IAAMsL,mBAAmB;AACrBuB,eAAa,aADQ;AAErBC,YAAU,UAFW;AAGrBvB,UAAQ;AAHa,CAAzB;;AAMeD,+EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;;AAOA,IAAMH,YAAY;AACd4B,WAAS,SADK;AAEd3B,aAAW;AAFG,CAAlB;;AAKeD,wEAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMW,gBAAgB;AAClBC,UAAQ,QADU;AAElBiB,WAAS,SAFS;AAGlBC,OAAK;AAHa,CAAtB;;AAMenB,4EAAf,E;;;;;;;;;;;;ACZA;AAAA;;;;;;AAMA,IAAMoB,qBAAqB;AACvBC,SAAK,KADkB;AAEvBC,SAAK,KAFkB;AAGvBC,SAAK,KAHkB;AAIvBC,SAAK,KAJkB;AAKvBC,WAAO,OALgB;AAMvBC,UAAM,MANiB;AAOvBC,WAAO,OAPgB;AAQvBC,SAAK;AARkB,CAA3B;;AAWeR,iFAAf,E;;;;;;;;;;;;ACjBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;;;;;;;AASA;AACA;AACA;AACA;
AACA;;;;;;;;;;;;;ACbA;AAAA;;;;;;AAMA,IAAMS,iBAAiB;AACnBC,cAAY;AADO,CAAvB;;AAIeD,6EAAf,E;;;;;;;;;;;;ACVA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AAiBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,IAAME,YAAY;AACdC,8DADc;AAEdC,sDAFc;AAGdC,4DAHc;AAIdC,8DAJc;AAKdvI,8DALc;AAMdwI,kFANc;AAOdhL,wDAPc;AAQdiL,wEARc;AASdC,oEATc;AAUdC,sEAVc;AAWdC,0EAXc;AAYdC,4EAZc;AAadC,0EAbc;AAcdC,0DAdc;AAed1F,oFAAkBA;AAfJ,CAAlB;;AAkBA,IAAM2F,UAAUC,0CAAGA,CAACD,OAApB;AACApU,OAAOgH,MAAP,CAAcoB,kDAAd,EAAyB;AACrBmL,wBADqB;AAErBe,8CAFqB;AAGrB/Q,6EAHqB;AAIrBgR,+EAJqB;AAKrB7O,qEALqB;AAMrB8L,2EANqB;AAOrBW,mFAPqB;AAQrBiC,oBARqB;AASrBhP,2EATqB;AAUrBoP,uDAAaA;AAVQ,CAAzB,EAWGC,mCAXH;;AAaerM,iHAAf,E;;;;;;;;;;;;AC3DA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;;AAEA;;;;;;;AAOA,SAASsM,eAAT,CAAyB9O,IAAzB,EAA+BC,MAA/B,EAAuC;AACnCD,WAAOA,QAAQ,EAAf;;AAEA,QAAIyM,qDAAaA,CAAC/M,GAAd,CAAkBO,OAAOkL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC9M,GAAd,CAAkBM,OAAOkL,OAAzB,EACU4D,OADV,CAEUpI,SAFV,CAEoB1G,OAAOxG,IAF3B,EAGUwG,MAHV,CAGiBA,MAHjB,EAIUD,IAJV,CAIeA,IAJf,EAKUwE,UALV,SAK0BxE,KAAKpF,MAAL,GAAc,CALxC,GAMUoU,KANV,EAAP;AAOH;AACD,WAAOvC,qDAAaA,CACH9M,GADV,CACcM,OAAOV,IAAP,KAAgB0L,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGUpI,SAHV,CAGoB1G,OAAOxG,IAH3B,EAIUwG,MAJV,CAIiBA,MAJjB,EAKUD,IALV,CAKeA,IALf,EAMUwE,UANV,SAM0BxE,KAAKpF,MAAL,GAAc,CANxC,GAOUoU,KAPV,EAAP;AAQH;;AAGD;;;;;;;AAOO,SAASC,0BAAT,CAAoCjI,YAApC,EAAkDxC,UAAlD,EAA8D;AAAA,QACzDvE,MADyD,GAC9C+G,YAD8C,CACzD/G,MADyD;;;AAGjE,QAAIwM,qDAAaA,CAAC/M,GAAd,CAAkBO,OAAOkL,OAAzB,CAAJ,EAAuC;AACnC,eAAOsB,qDAAaA,CAAC9M,GAAd,CAAkBM,OAAOkL,OAAzB,EACU4D,OADV,CAEU/H,YAFV,CAEuBA,YAFvB,EAGUxC,UAHV,CAGqBA,UAHrB,EAIUwK,KAJV,EAAP;AAKH;AACD,WAAOvC,qDAAaA,CACH9M,GADV,CACcM,OAAOV,IAAP,KAAgB0L,gDAASA,CAAC4B,OAA1B,GAAoCY,qDAAcA,CAACC,UAAnD,GAAgEtC,uDAAgBA,CAACuB,WAD/F,EAEUoC,OAFV,CAGU/H,YAHV,CAGuBA,YAHvB,EAIUxC,UAJV,CAIqBA,UAJrB,EAKUwK,KALV,EAAP;AAMH;;AAED;;;;;;;;AAQO,SAASjG,YAAT,CAAsBmG,UAAtB,EAAkCjP,MAAlC,EAA0CqB,OAA1C,EAAmD;AACtD,QAAM6N,aA
Aa,EAAnB;;AAEA,QAAI,EAAE7N,WAAWA,QAAQ1G,MAArB,CAAJ,EAAkC;AAC9B0G,kBAAUrB,OAAOzG,GAAP,CAAW;AAAA,mBAAQ8I,KAAK7I,IAAb;AAAA,SAAX,CAAV;AACH;;AAED6H,YAAQhH,OAAR,CAAgB,UAAC6H,MAAD,EAASzI,CAAT,EAAe;AAC3ByV,mBAAWhN,MAAX,IAAqBzI,CAArB;AACH,KAFD;;AAIA,WAAOuG,OAAOzG,GAAP,CAAW;AAAA,eAAQsV,gBAAgBI,WAAWC,WAAW7M,KAAK7I,IAAhB,CAAX,CAAhB,EAAmD6I,IAAnD,CAAR;AAAA,KAAX,CAAP;AACH,C;;;;;;;;;;;;AC9ED;AAAA;AAAA;AAAA;AACA;;AAEA,IAAM8M,aAAa;AACfpP,UAAM,EADS;;AAGfqP,mBAHe,2BAGEC,QAHF,EAGY7V,IAHZ,EAGkB;AAC7B,YAAM8V,SAAS9V,QAAQ+V,0DAAWA,EAAlC;;AAEA,aAAKxP,IAAL,CAAUuP,MAAV,IAAoB;AAChB9V,kBAAM8V,MADU;AAEhB3N,oBAAQ0N,QAFQ;;AAIhBrI,qBAJgB,uBAIH;AACT,oBAAIA,YAAY,KAAKM,gBAArB;;AAEA,oBAAI,CAACN,SAAL,EAAgB;AACZA,gCAAY,KAAKM,gBAAL,GAAwB,EAApC;AACA,yBAAK3F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3BoF,kCAAUpF,MAAMpI,IAAN,EAAV,IAA0BoI,KAA1B;AACH,qBAFD;AAGH;AACD,uBAAOoF,SAAP;AACH,aAde;AAehBwI,sBAfgB,wBAeF;AACV,oBAAIC,gBAAgB,KAAKjI,cAAzB;;AAEA,oBAAI,CAACiI,aAAL,EAAoB;AAChBA,oCAAgB,KAAKjI,cAAL,GAAsB,EAAtC;AACA,yBAAK7F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwB0L,gDAASA,CAAC4B,OAAtC,EAA+C;AAC3C6C,0CAAc7N,MAAMpI,IAAN,EAAd,IAA8BoI,KAA9B;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO6N,aAAP;AACH,aA3Be;AA4BhBC,wBA5BgB,0BA4BA;AACZ,oBAAIC,kBAAkB,KAAKpI,gBAA3B;;AAEA,oBAAI,CAAC,KAAKA,gBAAV,EAA4B;AACxBoI,sCAAkB,KAAKpI,gBAAL,GAAwB,EAA1C;AACA,yBAAK5F,MAAL,CAAYtH,OAAZ,CAAoB,UAACuH,KAAD,EAAW;AAC3B,4BAAIA,MAAM5B,MAAN,GAAeV,IAAf,KAAwB0L,gDAASA,CAACC,SAAtC,EAAiD;AAC7C0E,4CAAgB/N,MAAMpI,IAAN,EAAhB,IAAgCoI,KAAhC;AACH;AACJ,qBAJD;AAKH;AACD,uBAAO+N,eAAP;AACH;AAxCe,SAApB;AA0CA,eAAO,KAAK5P,IAAL,CAAUuP,MAAV,CAAP;AACH;AAjDc,CAAnB;;AAoDeH,yEAAf,E;;;;;;;;;;;;;;;;;;;;;;;ACvDA;AACA;;AAEA;;;;;;;;IAOqBS,M;;;;;;;;;;;;AACjB;;;;;;;8CAOuB;AACnB,gBAAMC,UAAU,KAAK9I,YAAL,CAAkB/G,MAAlB,CAAyB8K,IAAzC;AACA,mBAAO,CAAC+E,QAAQ,CAAR,CAAD,EAAaA,QAAQA,QAAQlV,MAAR,GAAiB,CAAzB,CAAb,CAAP;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKoM,YAAL,CAAkB/G,MAAlB,CAAyB8K,IAAhC;AACH;;;iCAEe;AACZ,mBAAO,IAAIgF,8DAAJ,EAAP;AACH;;;;EAzB+BC,kD;;AAAfH,qE;;
;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;AACA;AACA;AACA;;;;;;;;IAOqBI,W;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO7E,uDAAgBA,CAACuB,WAAxB;AACH;;AAED;;;;;;;;;;8CAOuB;AAAA;;AACnB,gBAAMuD,OAAO,IAAIC,GAAJ,EAAb;AACA,gBAAMC,SAAS,EAAf;;AAEA;AACAvH,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA,oBAAI,CAACwW,KAAKxQ,GAAL,CAAS2E,KAAT,CAAL,EAAsB;AAClB6L,yBAAKG,GAAL,CAAShM,KAAT;AACA+L,2BAAO5V,IAAP,CAAY6J,KAAZ;AACH;AACJ,aAND;AAOA,mBAAO+L,MAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIE,mEAAJ,EAAP;AACH;;;;EApCoCN,kD;;AAApBC,0E;;;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBM,U;;;;;;;;;;;;AACjB;;;;;;;kCAOW;AACP,mBAAO9C,qDAAcA,CAACC,UAAtB;AACH;;AAED;;;;;;;;;;8CAOuB;AACnB,mBAAO8C,yEAAyBA,CAAC,KAAKxJ,YAAL,CAAkBhH,IAA5C,EAAkD,KAAKwE,UAAvD,CAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIiM,kEAAJ,EAAP;AACH;;;;EAzBmCC,gD;;AAAnBH,yE;;;;;;;;;;;;;;;;;;;;;;ACZrB;;AAEA;;;;;;;;IAOqBP,S;;;;;;;;;;;;AACjB;;;;;;;iCAOU;AACN,gBAAI,CAAC,KAAKW,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAED;;;;;;;;;8CAMuB;AACnB,kBAAM,IAAIjQ,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAEA;;;;;;;;;;wCAOgB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAlCkC6Q,8C;;AAAlBb,wE;;;;;;;;;;;;;;;;;;;;;;ACTrB;AACA;AACA;AACA;AACA;;IAGMc,iB;AACF,iCAAc;AAAA;;AACV,aAAKC,UAAL,GAAkB,IAAIjS,GAAJ,EAAlB;AACH;;;;0CAEiBqM,O,EAAS6F,S,EAAW;AAClC,iBAAKD,UAAL,CAAgB1R,GAAhB,CAAoB8L,OAApB,EAA6B6F,SAA7B;AACA,mBAAO,IAAP;AACH;;;4BAEGzR,I,EAAM;AACN,mBAAO,KAAKwR,UAAL,CAAgBrR,GAAhB,CAAoBH,IAApB,CAAP;AACH;;;4BAEGA,I,EAAM;AACN,mBAAO,KAAKwR,UAAL,CAAgBpR,GAAhB,CAAoBJ,IAApB,CAAP;AACH;;;;;;AAGL,IAAM0R,wBAAwB,SAAxBA,qBAAwB,CAACpS,KAAD,EAAW;AACrCA,UACiBqS,iBADjB,CACmC9F,uDAAgBA,CAACuB,WADpD,EACiEsD,oDADjE,EAEiBiB,iBAFjB,CAEmC9F,uDAAgBA,CAACwB,QAFpD,EAE8DuE,iDAF9D,EAGiBD,iBAHjB,CAGmC9F,uDAAgBA,CAACC,MAHpD,EAG4DwE,+CAH5D,EAIiBqB,iBAJjB,CAImCzD,qDAAcA,CAACC,UAJlD,EAI8D6C,mDAJ9D;AAKH,CAND;;AAQA,IAAM9D,gBAAiB,YAAY;AAC/B,QAAI5N,QAAQ,IAAZ;AACA,aAASgB,QAAT,GAAqB;AACjBhB,gBAAQ,IAAIiS,iBAAJ,EAAR;AACAG,8BAAsB
pS,KAAtB;AACA,eAAOA,KAAP;AACH;AACD,WAAOA,SAASgB,UAAhB;AACH,CARsB,EAAvB;;AAUe4M,4EAAf,E;;;;;;;;;;;;;;;;;;;AC5CA;AACA;;AAEA;;;;;;;;;;;;;;;;;;;;IAmBqBoE,K;AACjB;;;;;;;AAOA,mBAAa7J,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AACnC,aAAKwC,YAAL,GAAoBA,YAApB;AACA,aAAKxC,UAAL,GAAkBA,UAAlB;AACH;;;;;;AAMD;;;;;;iCAMU;AACN,kBAAM,IAAI9D,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKsG,YAAL,CAAkB/G,MAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAK+G,YAAL,CAAkBvN,IAAzB;AACH;;AAED;;;;;;;;;+BAMQ;AACJ,mBAAO,KAAKuN,YAAL,CAAkB/G,MAAlB,CAAyBV,IAAhC;AACH;;AAED;;;;;;;;;kCAMW;AACP,mBAAO,KAAKyH,YAAL,CAAkB/G,MAAlB,CAAyBkL,OAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKnE,YAAL,CAAkB/G,MAAlB,CAAyBmR,WAAhC;AACH;;AAED;;;;;;;;;sCAMe;AACX,mBAAO,KAAKpK,YAAL,CAAkB/G,MAAlB,CAAyBoR,WAAzB,IAAwC,KAAKrK,YAAL,CAAkB/G,MAAlB,CAAyBxG,IAAxE;AACH;;AAED;;;;;;;;;+BAMQ;AAAA;;AACJ,gBAAMuG,OAAO,EAAb;AACA6I,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvCsG,qBAAKxF,IAAL,CAAU,MAAKwM,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAV;AACH,aAFD;AAGA,mBAAOsG,IAAP;AACH;;AAED;;;;;;;;;wCAMiB;AACb,kBAAM,IAAIU,KAAJ,CAAU,qBAAV,CAAN;AACH;;;iCAhGe;AACZ,kBAAM,IAAIA,KAAJ,CAAU,qBAAV,CAAN;AACH;;;4BAgGoB;AACjB,gBAAM4Q,UAAU;AACZC,yBAAS,EADG;AAEZC,0BAAU,IAFE;AAGZ7K,yBAHY,qBAGFlN,IAHE,EAGI;AACZ,yBAAK8X,OAAL,CAAa9X,IAAb,GAAoBA,IAApB;AACA,2BAAO,IAAP;AACH,iBANW;AAOZwG,sBAPY,kBAOLA,OAPK,EAOG;AACX,yBAAKsR,OAAL,CAAatR,MAAb,GAAsBA,OAAtB;AACA,2BAAO,IAAP;AACH,iBAVW;AAWZD,oBAXY,gBAWPA,KAXO,EAWD;AACP,yBAAKuR,OAAL,CAAavR,IAAb,GAAoBA,KAApB;AACA,2BAAO,IAAP;AACH,iBAdW;AAeZgH,4BAfY,wBAeCA,aAfD,EAee;AACvB,yBAAKuK,OAAL,CAAavK,YAAb,GAA4BA,aAA5B;AACA,2BAAO,IAAP;AACH,iBAlBW;AAmBZxC,0BAnBY,sBAmBDA,WAnBC,EAmBW;AACnB,yBAAK+M,OAAL,CAAa/M,UAAb,GAA0BA,WAA1B;AACA,2BAAO,IAAP;AACH,iBAtBW;AAuBZwK,qBAvBY,mBAuBJ;AACJ,wBAAIhI,eAAe,IAAnB;AACA,wBAAI,KAAKuK,OAAL,CAAavK,YAAb,YAAqCyK,sDAAzC,EAAuD;AACnDzK,uCAAe,KAAKuK,OAAL,CAAavK,YAA5B;AACH,qBAFD,MAEO,IAAI,KAAKuK,OAAL,CAAatR,MAAb,IAAuB,KAAKsR,OAAL,CAAavR,IAAxC,EAA8C;AACjDgH,uCAAe,IAAIyK,sDAAJ,CAAiB,KAAKF,OAAL,CAAa9X,IAA9B,EACK,KAAK8X
,OAAL,CAAavR,IADlB,EAEK,KAAKuR,OAAL,CAAatR,MAFlB,EAGK,KAAKuR,QAAL,CAAcE,MAAd,EAHL,CAAf;AAIH,qBALM,MAMF;AACD,8BAAM,IAAIhR,KAAJ,CAAU,0BAAV,CAAN;AACH;AACD,2BAAO,IAAI,KAAK8Q,QAAT,CAAkBxK,YAAlB,EAAgC,KAAKuK,OAAL,CAAa/M,UAA7C,CAAP;AACH;AArCW,aAAhB;AAuCA,mBAAO8M,OAAP;AACH;;;;;;AAxJgBT,oE;;;;;;;;;;;;ACtBrB;AAAA;AAAA;AAAA;AAAA;AACA;;AAEO,IAAML,4BAA4B,SAA5BA,yBAA4B,CAACxQ,IAAD,EAAOwE,UAAP,EAAsB;AAC3D,QAAImN,MAAM/M,OAAOgN,iBAAjB;AACA,QAAIC,MAAMjN,OAAOkN,iBAAjB;;AAEA;AACAjJ,6FAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM2K,QAAQrE,KAAKtG,CAAL,CAAd;AACA,YAAI2K,iBAAiBkI,4DAArB,EAAwC;AACpC;AACH;;AAED,YAAIlI,QAAQsN,GAAZ,EAAiB;AACbA,kBAAMtN,KAAN;AACH;AACD,YAAIA,QAAQwN,GAAZ,EAAiB;AACbA,kBAAMxN,KAAN;AACH;AACJ,KAZD;;AAcA,WAAO,CAACsN,GAAD,EAAME,GAAN,CAAP;AACH,CApBM,C;;;;;;;;;;;;ACHP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;;;;;;;ACHA;AACA;AACA;;AAEA;;;;;;;;IAOqBnB,O;;;;;;;;;;;;AACnB;;;;;;;iCAOY;AACN,gBAAI,CAAC,KAAKC,aAAV,EAAyB;AACrB,qBAAKA,aAAL,GAAqB,KAAKC,mBAAL,EAArB;AACH;AACD,mBAAO,KAAKD,aAAZ;AACH;;AAEH;;;;;;;;;+BAMU;AACJ,mBAAO,KAAK3J,YAAL,CAAkB/G,MAAlB,CAAyB8R,IAAhC;AACH;;AAEH;;;;;;;;;mCAMc;AACR,mBAAO,KAAK/K,YAAL,CAAkB/G,MAAlB,CAAyB+R,QAAzB,IAAqCC,8EAA5C;AACH;;AAEH;;;;;;;;;uCAMkB;AAAA,gBACJC,YADI,GACa,KAAKlL,YAAL,CAAkB/G,MAD/B,CACJiS,YADI;;AAEZ,mBAAOA,wBAAwB3Y,QAAxB,GAAmC2Y,YAAnC,GAAkDC,mDAAzD;AACH;;AAEH;;;;;;;;;8CAMyB;AACnB,kBAAM,IAAIzR,KAAJ,CAAU,qBAAV,CAAN;AACH;;AAED;;;;;;;;;;wCAOiB;AACb,mBAAO,KAAKV,IAAL,EAAP;AACH;;;;EAjEgC6Q,8C;;AAAhBH,sE;;;;;;;;;;;;;;;;;;;;;;;ACXrB;AACA;;AAEA;;;;;;;;IAOqBX,Y;;;;;;;;;;;;AACnB;;;;;;;8BAOSqC,G,EAAK;AACR,gBAAMC,QAAQ,yDAAd;AACAD,kBAAME,OAAOF,GAAP,CAAN;AACA,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAChG,4DAAiBA,CAACiG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIK,UAAUL,IAAIM,KAAJ,CAAUL,KAAV,CAAd;AACAE,yBAASE,UAAa7N,OAAO+N,UAAP,CAAkBF,QAAQ,CAAR,CAAlB,CAAb,SAA8C7N,OAAO+N,UAAP,CAAkBF,QAAQ,CAAR,CAAlB,CAA9C,GACUlG,4DAAiBA,CAACqG,EADrC;AAEH,aAJD,MAIO;AACHL,yBAAShG,4DAAiBA,CAACsG,cAAlB,CAAiCT,GAAj
C,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EArBqCO,qD;;AAArB/C,2E;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBO,iB;;;;;;;;;;;;AACnB;;;;;;;8BAOS8B,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAChG,4DAAiBA,CAACiG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnCG,yBAASD,OAAOF,GAAP,EAAYta,IAAZ,EAAT;AACH,aAFD,MAEO;AACHya,yBAAShG,4DAAiBA,CAACsG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAjB0CO,qD;;AAA1BxC,gF;;;;;;;;;;;;;;;;;;;;;;;ACVrB;AACA;;AAEA;;;;;;;;IAOqBG,gB;;;;;;;;;;;;AACnB;;;;;;;8BAOS2B,G,EAAK;AACR,gBAAIG,eAAJ;AACA;AACA,gBAAI,CAAChG,4DAAiBA,CAACiG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIW,YAAYJ,WAAWP,GAAX,EAAgB,EAAhB,CAAhB;AACAG,yBAAS3N,OAAO3M,KAAP,CAAa8a,SAAb,IAA0BxG,4DAAiBA,CAACqG,EAA5C,GAAiDG,SAA1D;AACH,aAHD,MAGO;AACHR,yBAAShG,4DAAiBA,CAACsG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAlByCO,qD;;AAAzBrC,+E;;;;;;;;;;;;;;;;;ACVrB;;;;;;IAMqBqC,W;;;;;;;;AACjB;;;;;;4BAMS;AACL,YAAM,IAAIpS,KAAJ,CAAU,qBAAV,CAAN;AACH;;;;;;AATgBoS,0E;;;;;;;;;;;;;;;;;;;;;;;;ACNrB;AACA;AACA;;AAEA;;;;;;;;IAOqBE,c;;;;;;;;;;;;;AAEjB;;;;;;;8BAOOZ,G,QAAiB;AAAA,gBAAVzZ,MAAU,QAAVA,MAAU;;AACpB,gBAAI4Z,eAAJ;AACA;AACA,gBAAI,CAAC,KAAKU,IAAV,EAAgB;AACZ,qBAAKA,IAAL,GAAY,IAAItE,wDAAJ,CAAsBhW,MAAtB,CAAZ;AACH;AACD,gBAAI,CAAC4T,4DAAiBA,CAACiG,SAAlB,CAA4BJ,GAA5B,CAAL,EAAuC;AACnC,oBAAIc,aAAa,KAAKD,IAAL,CAAUE,aAAV,CAAwBf,GAAxB,CAAjB;AACAG,yBAASW,aAAaA,WAAWE,OAAX,EAAb,GAAoC7G,4DAAiBA,CAACqG,EAA/D;AACH,aAHD,MAGO;AACHL,yBAAShG,4DAAiBA,CAACsG,cAAlB,CAAiCT,GAAjC,CAAT;AACH;AACD,mBAAOG,MAAP;AACH;;;;EAtBuCO,qD;;AAAvBE,6E;;;;;;;;;;;;;;;;;ACXrB;;;;;;;;IAQqBvB,Y;AACjB;;;;;;;;;AASA,wBAAahY,IAAb,EAAmBuG,IAAnB,EAAyBC,MAAzB,EAAiCyR,MAAjC,EAAyC;AAAA;;AACrC,SAAKjY,IAAL,GAAYA,IAAZ;AACA,SAAKwG,MAAL,GAAcA,MAAd;AACA,SAAKyR,MAAL,GAAcA,MAAd;AACA,SAAK1R,IAAL,GAAY,KAAKqT,SAAL,CAAerT,IAAf,CAAZ;AACH;;AAED;;;;;;;;;;;8BAOWA,I,EAAM;AAAA;;AACb,aAAOA,KAAKxG,GAAL,CAAS;AAAA,eAAS,MAAKkY,MAAL,CAAYnZ,KAAZ,CAAkB8L,KAAlB,EAAyB,EAAE1L,QAAQ,MAAKsH,MAAL,CAAYtH,MAAtB,EAAzB,CAAT;AAAA,OAAT,CAAP;AACH;;;;;;AA1BgB8Y,2E;;;;;;;;;;;;;;;;;;;;;;;;;;;ACR
rB;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;;IAOqBN,Q;;;AAChB;;;;;;;AAOD,sBAAanK,YAAb,EAA2BxC,UAA3B,EAAuC;AAAA;;AAAA,wHAC7BwC,YAD6B,EACfxC,UADe;;AAGnC,cAAK8O,cAAL,GAAsB,IAAtB;AAHmC;AAItC;;AAEA;;;;;;;;;;;8CAOsB;AACnB,mBAAO9C,yEAAyBA,CAAC,KAAKxJ,YAAL,CAAkBhH,IAA5C,EAAkD,KAAKwE,UAAvD,CAAP;AACH;;AAGD;;;;;;;;;uDAMgC;AAC5B,gBAAI,KAAK8O,cAAT,EAAyB;AACrB,uBAAO,KAAKA,cAAZ;AACH;;AAED,gBAAMC,aAAa,KAAKvT,IAAL,GAAYwT,MAAZ,CAAmB;AAAA,uBAAQ,EAAElR,gBAAgBiK,4DAAlB,CAAR;AAAA,aAAnB,EAAiEvJ,IAAjE,CAAsE,UAACyQ,CAAD,EAAIC,CAAJ;AAAA,uBAAUD,IAAIC,CAAd;AAAA,aAAtE,CAAnB;AACA,gBAAMC,QAAQJ,WAAW3Y,MAAzB;AACA,gBAAIgZ,UAAUhP,OAAOgN,iBAArB;AACA,gBAAIiC,kBAAJ;AACA,gBAAIC,kBAAJ;AACA,gBAAIC,iBAAiB,CAArB;;AAEA,iBAAK,IAAIra,IAAI,CAAb,EAAgBA,IAAIia,KAApB,EAA2Bja,GAA3B,EAAgC;AAC5Bma,4BAAYN,WAAW7Z,IAAI,CAAf,CAAZ;AACAoa,4BAAYP,WAAW7Z,CAAX,CAAZ;;AAEA,oBAAIoa,cAAcD,SAAlB,EAA6B;AACzB;AACH;;AAEDD,0BAAUI,KAAKrC,GAAL,CAASiC,OAAT,EAAkBE,YAAYP,WAAW7Z,IAAI,CAAf,CAA9B,CAAV;AACAqa;AACH;;AAED,gBAAI,CAACA,cAAL,EAAqB;AACjBH,0BAAU,IAAV;AACH;AACD,iBAAKN,cAAL,GAAsBM,OAAtB;;AAEA,mBAAO,KAAKN,cAAZ;AACH;;AAED;;;;;;;;;iCAMU;AACN,mBAAO,KAAKtM,YAAL,CAAkB/G,MAAlB,CAAyBtH,MAAhC;AACH;;AAED;;;;;;;;;;wCAOiB;AAAA;;AACb,gBAAMqH,OAAO,EAAb;AACA,gBAAMY,aAAa,KAAKjI,MAAL,EAAnB;;AAEAkQ,qGAAkBA,CAAC,KAAKrE,UAAxB,EAAoC,UAAC9K,CAAD,EAAO;AACvC,oBAAM2K,QAAQ,OAAK2C,YAAL,CAAkBhH,IAAlB,CAAuBtG,CAAvB,CAAd;AACA;AACA,oBAAI6S,4DAAiBA,CAACiG,SAAlB,CAA4BnO,KAA5B,KAAuC,CAACzD,UAAD,IAAegE,OAAOqP,QAAP,CAAgB5P,KAAhB,CAA1D,EAAmF;AAC/E;AACA,wBAAM6P,cAAc3H,4DAAiBA,CAACsG,cAAlB,CAAiCxO,KAAjC,KAA2CA,KAA/D;AACArE,yBAAKxF,IAAL,CAAU0Z,WAAV;AACH,iBAJD,MAIO;AACHlU,yBAAKxF,IAAL,CAAUmU,wDAAiBA,CAACwF,QAAlB,CAA2B9P,KAA3B,EAAkCzD,UAAlC,CAAV;AACH;AACJ,aAVD;AAWA,mBAAOZ,IAAP;AACH;;;iCAEe;AACZ,mBAAO,IAAIgT,gEAAJ,EAAP;AACH;;;;EArGiChD,kD;;AAAjBmB,uE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;ACdrB;AACA;AACA;AACA;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;AAGA,SAASiD,oBAAT,CAA+BxS,MAA/B,EAAuC0E,aAAvC,EAAsDR,OAAtD,EAA+DpM,CAA/D,EAAkE;AAC9D,QAAM2a,OAAO,EAAb;;AAD8D;AAAA;A
AAA;;AAAA;AAG9D,6BAA2BzS,OAAO0S,OAAP,EAA3B,8HAA6C;AAAA;;AAAA;;AAAA,gBAAjC1c,GAAiC;AAAA,gBAA5BiK,KAA4B;;AACzCwS,iBAAKxS,MAAMpI,IAAN,EAAL,IAAqB,IAAI0N,8CAAJ,CAAUb,cAAc1O,GAAd,EAAmB8B,CAAnB,CAAV,EAAiCoM,QAAQlO,GAAR,EAAa8B,CAAb,CAAjC,EAAkDmI,KAAlD,CAArB;AACH;AAL6D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAM9D,WAAOwS,IAAP;AACH;;AAEM,SAASE,eAAT,CAA0B3S,MAA1B,EAAkC;AACrC,QAAMyS,OAAO,EAAb;;AAEA,SAAK,IAAMzc,GAAX,IAAkBgK,MAAlB,EAA0B;AACtByS,aAAKzc,GAAL,IAAY,IAAIuP,8CAAJ,CAAUvF,OAAOhK,GAAP,EAAY4c,cAAtB,EAAsC5S,OAAOhK,GAAP,EAAY6c,QAAlD,EAA4D7c,GAA5D,CAAZ;AACH;AACD,WAAOyc,IAAP;AACH;;AAEM,IAAMK,eAAe,SAAfA,YAAe,QAA8B9N,iBAA9B,EAAiD+N,cAAjD,EAAoE;AAAA;AAAA,QAAlEnQ,UAAkE;AAAA,QAAtDoQ,aAAsD;;AAC5F,QAAIC,SAASD,cAAcha,MAAd,GAAuBga,cAAcjQ,KAAd,CAAoB,GAApB,CAAvB,GAAkD,EAA/D;AACA,QAAImQ,kBAAkBlO,kBAAkBK,SAAlB,EAAtB;AACA,QAAI8N,YAAYF,OAAOrb,GAAP,CAAW;AAAA,eAAQyV,iFAA0BA,CAAC6F,gBAAgBE,IAAhB,EAAsBhO,YAAjD,EAA+DxC,UAA/D,CAAR;AAAA,KAAX,CAAhB;AACA,WAAO4K,oDAAUA,CAACC,eAAX,CAA2B0F,SAA3B,EAAsCJ,cAAtC,CAAP;AACH,CALM;;AAOA,IAAMM,2BAA2B,SAA3BA,wBAA2B,CAACjL,KAAD,EAAQkL,SAAR,EAA+C;AAAA,QAA5B/P,MAA4B,uEAAnB,EAAmB;AAAA,QAAfgQ,UAAe;;AACnF,QAAID,cAAcvX,yDAAcA,CAACI,OAAjC,EAA0C;AAAA;;AACtCiM,cAAMoL,WAAN,CAAkBxa,MAAlB,GAA2B,CAA3B;AACA,oCAAMwa,WAAN,EAAkB5a,IAAlB,8CAA0B2a,UAA1B;AACH,KAHD,MAGO;AACHnL,cAAMoL,WAAN,CAAkB5a,IAAlB,CAAuB;AACnB6a,gBAAIH,SADe;AAEnBI,kBAAMnQ,MAFa;AAGnBoQ,sBAAUJ;AAHS,SAAvB;AAKH;AACJ,CAXM;AAYA,IAAMK,4BAA4B,SAA5BA,yBAA4B,CAACC,QAAD,EAAWC,KAAX,EAAqB;AAAA;;AAC1D,mCAAMC,mBAAN,EAA0Bnb,IAA1B,iDAAkCib,SAASE,mBAA3C,4BAAmEF,SAASL,WAA5E;AACH,CAFM;;AAIA,IAAM3P,qBAAqB,SAArBA,kBAAqB,CAACgQ,QAAD,EAAWzL,KAAX,EAAkBkL,SAAlB,EAAyD;AAAA,QAA5B/P,MAA4B,uEAAnB,EAAmB;AAAA,QAAfgQ,UAAe;;AACvFF,6BAAyBjL,KAAzB,EAAgCkL,SAAhC,EAA2C/P,MAA3C,EAAmDgQ,UAAnD;AACAK,8BAA0BC,QAA1B,EAAoCzL,KAApC;AACH,CAHM;;AAKP,IAAM4L,sEACDhK,oDAAaA,CAACC,MADb,EACsB;AACpBgK,eAAW,CAAC,YAAD,CADS;AAEpBC,cAAU,CAAC,IAAD,EAAO,KAAP;AAFU,CADtB,mCAKDlK,oDAAaA,CAACkB,OALb,EAKuB;AACrB+I,eAAW,CAAC,kBAAD,CADU;AAErBC,cAAU,CAAC,
KAAD,EAAQ,IAAR;AAFW,CALvB,mCASDlK,oDAAaA,CAACmB,GATb,EASmB;AACjB8I,eAAW,CAAC,YAAD,EAAe,kBAAf,CADM;AAEjBC,cAAU,CAAC,IAAD,EAAO,IAAP;AAFO,CATnB,kBAAN;;AAeA,IAAMC,qBAAqB,SAArBA,kBAAqB,CAACvR,UAAD,EAAa9K,CAAb,EAAgBsc,iBAAhB,EAAsC;AAC7D,QAAIA,sBAAsB,CAAC,CAAvB,IAA4Btc,MAAOsc,oBAAoB,CAA3D,EAA+D;AAC3D,YAAMC,KAAKzR,WAAW5J,MAAX,GAAoB,CAA/B;;AAEA4J,mBAAWyR,EAAX,IAAoBzR,WAAWyR,EAAX,EAAetR,KAAf,CAAqB,GAArB,EAA0B,CAA1B,CAApB,SAAoDjL,CAApD;AACH,KAJD,MAIO;AACH8K,mBAAWhK,IAAX,MAAmBd,CAAnB;AACH;AACJ,CARD;;AAUO,IAAMwc,2BAA2B,SAA3BA,wBAA2B,CAAC1R,UAAD,EAAa2R,OAAb,EAAsBxK,IAAtB,EAA+B;AACnE,QAAIyK,uBAAuB,CAAC,CAA5B;AACA,QAAIC,uBAAuB,CAAC,CAA5B;AACA,QAAMC,gBAAgB,EAAtB;AACA,QAAMC,gBAAgB,EAAtB;;AAJmE,+CAM9BX,cAAcjK,IAAd,EAAoBmK,QANU;AAAA,QAM5DU,YAN4D;AAAA,QAM9CC,YAN8C;;AAQnE5N,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAMgd,gBAAgBP,QAAQzc,CAAR,CAAtB;AACAgd,yBAAiBF,YAAjB,IAAiCT,mBAAmBO,aAAnB,EAAkC5c,CAAlC,EAAqC0c,oBAArC,CAAjC;AACA,SAACM,aAAD,IAAkBD,YAAlB,IAAkCV,mBAAmBQ,aAAnB,EAAkC7c,CAAlC,EAAqC2c,oBAArC,CAAlC;AACH,KAJD;AAKA,WAAO;AACH7R,oBAAY8R,cAAczc,IAAd,CAAmB,GAAnB,CADT;AAEH8c,0BAAkBJ,cAAc1c,IAAd,CAAmB,GAAnB;AAFf,KAAP;AAIH,CAjBM;;AAoBA,IAAM+c,0BAA0B,SAA1BA,uBAA0B,CAACpS,UAAD,EAAa2R,OAAb,EAAsBxK,IAAtB,EAA4BH,YAA5B,EAA0CqL,aAA1C,EAA4D;AAC/F,QAAIb,oBAAoB,EAAxB;AACA,QAAMc,kBAAkB,EAAxB;AACA,QAAMC,eAAe,EAArB;;AAEAlO,wEAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAIyc,QAAQzc,CAAR,CAAJ,EAAgB;AACZ,gBAAIwW,OAAO,EAAX;;AAEA,gBAAI8G,eAAe,EAAEzU,MAAM,EAAR,EAAnB;;AAEAiJ,yBAAalR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxB,oBAAMhF,OAAO6W,cAAc7R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAb;AACAwW,uBAAUA,IAAV,SAAkBlQ,IAAlB;AACAgX,6BAAazU,IAAb,CAAkByC,CAAlB,IAAuBhF,IAAvB;AACH,aAJD;;AAMA,gBAAI8W,gBAAgB5G,IAAhB,MAA0B3L,SAA9B,EAAyC;AACrCuS,gCAAgB5G,IAAhB,IAAwB,EAAxB;AACA8F,kCAAkB9F,IAAlB,IAA0B,CAAC,CAA3B;AACA6G,6BAAa7G,IAAb,IAAqB8G,YAArB;AACH;;AAEDjB,+BAAmBe,gBAAgB5G,IAAhB,CAAnB,EAA0CxW,CAA1C,EAA6Csc,kBAAkB9F,IAAlB,CAA7C;AACA8F,8BAAkB9F,IAAlB,IAA0BxW,CAA1B;AACH;AACJ,KArBD;;AAuBA,WAAO;AACHod,wCADG;AAEH
C;AAFG,KAAP;AAIH,CAhCM;;AAmCA,IAAME,eAAe,SAAfA,YAAe,CAACC,QAAD,EAAWC,QAAX,EAAqBhS,MAArB,EAA6BsQ,QAA7B,EAAuC2B,QAAvC,EAAoD;AAC5E,QAAI3O,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAM+M,SAAS9M,YAAT,EAAN;AAAA,KAApB;AAF4E,QAGpEgD,IAHoE,GAG3DxG,MAH2D,CAGpEwG,IAHoE;;AAI5E,QAAMnH,aAAa0S,SAAS7T,WAA5B;AACA,QAAMyD,qBAAqBoQ,SAASrQ,kBAAT,CAA4BE,mBAAvD;;AAEA,QAAMsQ,mBAAmB,SAAnBA,gBAAmB;AAAA,eAASF,SAC9BrQ,mBAAmBuB,KAAnB,CAD8B,EAE9BA,KAF8B,EAG9BK,aAH8B,EAI9BD,WAJ8B,CAAT;AAAA,KAAzB;;AAOA,WAAO2O,SAAS5S,UAAT,EAAqB6S,gBAArB,EAAuC1L,IAAvC,CAAP;AACH,CAfM;;AAiBA,IAAM2L,qBAAqB,SAArBA,kBAAqB,CAACtN,KAAD,EAAW;AACzC,QAAMkN,WAAWlN,MAAM1B,KAAN,CAAY,KAAZ,CAAjB;AACA,QAAM1B,oBAAoBoD,MAAM/G,oBAAN,EAA1B;AACAiU,aAAS3T,cAAT,GAA0BqD,kBAAkBhF,MAAlB,CAAyBpI,GAAzB,CAA6B;AAAA,eAAKO,EAAEN,IAAF,EAAL;AAAA,KAA7B,EAA4CI,IAA5C,CAAiD,GAAjD,CAA1B;;AAEA;AACA+M,sBAAkBW,gBAAlB,GAAqC,IAArC;AACAX,sBAAkBY,gBAAlB,GAAqC,IAArC;AACAZ,sBAAkBa,cAAlB,GAAmC,IAAnC;AACAyP,aAASxP,qBAAT,GAAiCC,qBAAjC;;AAEA,WAAOuP,QAAP;AACH,CAZM;;AAcP,IAAMK,SAAS,SAATA,MAAS,CAACzW,GAAD,EAAMd,IAAN,EAAYyK,EAAZ,EAAgB+M,KAAhB,EAA0B;AACrC,QAAI5f,MAAM6S,GAAG3J,GAAH,EAAQd,IAAR,EAAc,CAAd,EAAiBwX,KAAjB,CAAV;;AAEA,SAAK,IAAI9d,IAAI,CAAR,EAAW+d,MAAM3W,IAAIlG,MAA1B,EAAkClB,IAAI+d,GAAtC,EAA2C/d,GAA3C,EAAgD;AAC5C9B,cAASA,GAAT,SAAgB6S,GAAG3J,GAAH,EAAQd,IAAR,EAActG,CAAd,EAAiB8d,KAAjB,CAAhB;AACH;AACD,WAAO5f,GAAP;AACH,CAPD;;AASA,IAAM8f,QAAQ,SAARA,KAAQ,CAAC5W,GAAD,EAAMc,MAAN,EAAcoC,GAAd,EAAmBwT,KAAnB,EAA6B;AACvC,QAAM3V,QAAQf,IAAIkD,GAAJ,CAAd;AACA,QAAMoO,MAAMvQ,UAAUnE,iDAAV,GAAmB8Z,KAAnB,GAA2B5V,OAAOC,KAAP,EAAc8V,aAArD;AACA,WAAOvF,GAAP;AACH,CAJD;;AAMA,IAAMwF,gBAAgB,SAAhBA,aAAgB,CAACxF,GAAD,EAAMhC,MAAN,EAAiB;AACnC,QAAMyH,YAAYzH,OAAO,CAAP,aAAqBvV,KAArB,GAA6BuV,MAA7B,GAAsC,CAACA,MAAD,CAAxD;AACA,WAAOyH,UAAUC,IAAV,CAAe;AAAA,eAAO1F,OAAO2F,IAAI,CAAJ,CAAP,IAAiB3F,OAAO2F,IAAI,CAAJ,CAA/B;AAAA,KAAf,CAAP;AACH,CAHD;;AAKA,IAAMC,sEACDvK,qDAAcA,CAACC,UADd,EAC2BkK,aAD3B,mCAEDxM,uDAAgBA,CAACwB,QAFhB,EAE2BgL,aAF3B,kBAAN;;AAKA,IAAMK,iBAAiB,SAAjBA,cAAiB,CAACpgB,KAAD,EAAQuY,MAAR,EAAgB8H
,SAAhB;AAAA,WAA8BF,cAAcE,SAAd,EAAyBrgB,KAAzB,EAAgCuY,MAAhC,CAA9B;AAAA,CAAvB;;AAEO,IAAM+H,yBAAyB,SAAzBA,sBAAyB,CAACnO,KAAD,EAAQoO,UAAR,EAAoC;AAAA,QAAhBjT,MAAgB,uEAAP,EAAO;;AACtE,QAAIkT,MAAM,EAAV;AACA,QAAMnD,YAAY/P,OAAO+P,SAAP,IAAoBzW,4DAAiBA,CAACC,GAAxD;AAFsE,8BAGAyG,MAHA,CAG9DmT,WAH8D;AAAA,QAG9DA,WAH8D,uCAGhD,IAHgD;AAAA,gCAGAnT,MAHA,CAG1CoT,eAH0C;AAAA,QAG1CA,eAH0C,yCAGxB,KAHwB;AAAA,wBAGApT,MAHA,CAGjBmD,KAHiB;AAAA,QAGjBA,KAHiB,iCAGT,IAHS;;AAItE,QAAMkQ,cAAclQ,QAAQgP,mBAAmBtN,KAAnB,CAAR,GAAoCA,KAAxD;AACA,QAAMyO,oBAAoBD,YAAYxQ,eAAZ,EAA1B;;AAEA,QAAI,CAACoQ,WAAWxd,MAAhB,EAAwB;AACpByd,cAAM,CAAC;AAAA,mBAAM,KAAN;AAAA,SAAD,CAAN;AACH,KAFD,MAEO;AACHA,cAAMD,WAAW5e,GAAX,CAAe;AAAA,mBAAc,iBAAuB;AAAA,2CAApB+b,QAAoB;AAAA,oBAApBA,QAAoB,kCAAT,EAAS;AAAA,4CACZA,QADY,CAC9CtM,WAD8C;AAAA,oBAC9CA,WAD8C,yCAChC,CAAC,EAAD,EAAK,EAAL,CADgC;AAAA,oBACtByP,KADsB,GACZnD,QADY,CACtBmD,KADsB;;AAAA,kDAEjBzP,WAFiB;AAAA;AAAA,oBAEjDtF,UAFiD,iCAEpC,EAFoC;AAAA;AAAA,oBAEhCgV,MAFgC,kCAEvB,EAFuB;;AAGtD,oBAAMC,UAAUjV,WAAWlC,MAAX,CAAkB,UAACjI,GAAD,EAAMC,IAAN,EAAYC,CAAZ,EAAkB;AAChDF,wBAAIC,IAAJ,IAAYC,CAAZ;AACA,2BAAOF,GAAP;AACH,iBAHe,EAGb,EAHa,CAAhB;AAIAmK,6BAAaA,WAAW6P,MAAX,CAAkB;AAAA,2BAAU3R,SAAS4W,iBAAT,IACrCA,kBAAkB5W,KAAlB,EAAyBgX,GAAzB,CAA6BtZ,IAA7B,KAAsC0L,gDAASA,CAACC,SADZ,IAC0BrJ,UAAUnE,iDAD7C;AAAA,iBAAlB,CAAb;AAEA,oBAAMob,OAAOnV,WAAW/I,MAAxB;AACA,oBAAMme,YAAY,EAAlB;;AAEA,oBAAID,IAAJ,EAAU;AAAA,+CACGpf,CADH,EACU+d,GADV;AAEF,4BAAMzd,MAAMiP,YAAYvP,CAAZ,CAAZ;AACA,4BAAM9B,WAAS+L,WAAWnK,GAAX,CAAe,UAACqI,KAAD,EAAW;AACrC,gCAAMmC,MAAM4U,QAAQ/W,KAAR,CAAZ;AACA,mCAAO7H,IAAIgK,GAAJ,CAAP;AACH,yBAHc,CAAf;AAIA+U,kCAAUnhB,GAAV,IAAiB,CAAjB;AAPE;;AACN,yBAAK,IAAI8B,IAAI,CAAR,EAAW+d,MAAMxO,YAAYrO,MAAlC,EAA0ClB,IAAI+d,GAA9C,EAAmD/d,GAAnD,EAAwD;AAAA,8BAA/CA,CAA+C,EAAxC+d,GAAwC;AAOvD;AACJ;AACD,oBAAIuB,YAAY5e,OAAOmI,IAAP,CAAYmW,SAAS,EAArB,EAAyBlF,MAAzB,CAAgC;AAAA,2BAAS3R,SAAS4W,iBAAlB;AAAA,iBAAhC,CAAhB;AACA,oBAAMQ,UAAUN,OAAO/d,MAAP,IAAiBoe,UAAUpe,MAA3C;;AAEA,oBAAI,CAAC2d,eAAL,EAAsB;AAClBS,gCAAYA,UAAUxF,MAAV,CAAiB;AAAA,+BAASiF,kBAAk
B5W,KAAlB,EAAyBgX,GAAzB,CAA6BtZ,IAA7B,KAAsC0L,gDAASA,CAAC4B,OAAzD;AAAA,qBAAjB,CAAZ;AACH;;AAED,oBAAI,CAACyL,WAAL,EAAkB;AACdU,gCAAYA,UAAUxF,MAAV,CAAiB;AAAA,+BAASiF,kBAAkB5W,KAAlB,EAAyBgX,GAAzB,CAA6BtZ,IAA7B,KAAsC0L,gDAASA,CAACC,SAAzD;AAAA,qBAAjB,CAAZ;AACH;;AAED,uBAAO+N,UAAU,UAACrX,MAAD,EAASlI,CAAT,EAAe;AAC5B,wBAAIwf,UAAU,IAAd;AACA,wBAAIZ,WAAJ,EAAiB;AACbY,kCAAUJ,OAAOC,UAAUxB,OAAO5T,UAAP,EAAmB/B,MAAnB,EAA2B8V,KAA3B,EAAkChe,CAAlC,CAAV,CAAP,GAAyD,IAAnE;AACH;;AAED,2BAAOsf,UAAUG,KAAV,CAAgB,UAACtX,KAAD,EAAW;AAC9B,4BAAMuQ,MAAMxQ,OAAOC,KAAP,EAAc8V,aAA1B;AACA,+BAAOM,eAAe7F,GAAf,EAAoBsG,MAAM7W,KAAN,CAApB,EAAkC4W,kBAAkB5W,KAAlB,EAAyBgX,GAAzB,CAA6B1N,OAA/D,CAAP;AACH,qBAHM,KAGD+N,OAHN;AAIH,iBAVM,GAUH;AAAA,2BAAM,KAAN;AAAA,iBAVJ;AAWH,aA5CiC,CA4C/B3O,SA5C+B,CAAb;AAAA,SAAf,CAAN;AA6CH;;AAED,QAAI6O,sBAAJ;AACA,QAAIlE,cAAczW,4DAAiBA,CAACC,GAApC,EAAyC;AACrC0a,wBAAgBZ,YAAY1K,MAAZ,CAAmB,UAAClM,MAAD,EAASlI,CAAT;AAAA,mBAAe2e,IAAIc,KAAJ,CAAU;AAAA,uBAAM1O,GAAG7I,MAAH,EAAWlI,CAAX,CAAN;AAAA,aAAV,CAAf;AAAA,SAAnB,EAAkE;AAC9E0L,uBAAW;AADmE,SAAlE,CAAhB;AAGH,KAJD,MAIO;AACHgU,wBAAgBZ,YAAY1K,MAAZ,CAAmB,UAAClM,MAAD,EAASlI,CAAT;AAAA,mBAAe2e,IAAIP,IAAJ,CAAS;AAAA,uBAAMrN,GAAG7I,MAAH,EAAWlI,CAAX,CAAN;AAAA,aAAT,CAAf;AAAA,SAAnB,EAAiE;AAC7E0L,uBAAW;AADkE,SAAjE,CAAhB;AAGH;;AAED,WAAOgU,aAAP;AACH,CArEM;;AAwEA,IAAMtN,kBAAkB,SAAlBA,eAAkB,CAAC2J,QAAD,EAAWjK,YAAX,EAA4D;AAAA,QAAnCC,SAAmC,uEAAvB;AAAA,eAAO2G,GAAP;AAAA,KAAuB;AAAA,QAAXjN,MAAW;AAAA,QAEnFC,SAFmF,GAGnFD,MAHmF,CAEnFC,SAFmF;;AAIvF,QAAMyR,gBAAgBpB,SAASrP,aAAT,GAAyBa,SAAzB,EAAtB;;AAJuF,wBASnFgQ,aACAxB,SAASnN,KAAT,CAAelD,SAAf,CADA,EAEAqG,SAFA,EAGAtG,MAHA,EAIAsQ,QAJA,EAKA;AAAA,0CAAInQ,MAAJ;AAAIA,kBAAJ;AAAA;;AAAA,eAAesR,yCAA2BtR,MAA3B,SAAmCkG,YAAnC,EAAiDqL,aAAjD,GAAf;AAAA,KALA,CATmF;AAAA,QAOnFC,eAPmF,iBAOnFA,eAPmF;AAAA,QAQnFC,YARmF,iBAQnFA,YARmF;;AAiBvF,QAAMsC,YAAY,EAAlB;AACAjf,WAAOmI,IAAP,CAAYuU,eAAZ,EAA6B9T,IAA7B,GAAoC1I,OAApC,CAA4C,UAACsJ,CAAD,EAAO;AAC/C,YAAIkT,gBAAgBlT,CAAhB,CAAJ,EAAwB;AACpB,gBAAM0V,SAAS7D,SAASnN,KAAT,CAAelD,SAAf,CAAf;AACA,gBAAMmU,aAAaxC,aAAanT,CA
Ab,CAAnB;AACA0V,mBAAOjW,WAAP,GAAqByT,gBAAgBlT,CAAhB,EAAmB/J,IAAnB,CAAwB,GAAxB,CAArB;AACAyf,mBAAO5R,qBAAP,GAA+BC,qBAA/B;;AAEA,gBAAM6R,oBAAoB,SAApBA,iBAAoB;AAAA,uBAAUhO,aAAa2N,KAAb,CAAmB;AAAA,2BAAKvX,OAAOoD,CAAP,EAAU2S,aAAV,KAA4B4B,WAAWhX,IAAX,CAAgByC,CAAhB,CAAjC;AAAA,iBAAnB,CAAV;AAAA,aAA1B;AACA;AACA,gBAAII,SAAJ,EAAe;AACXK,mCAAmBgQ,QAAnB,EAA6B6D,MAA7B,EAAqC3b,yDAAcA,CAACC,MAApD,EAA4DuH,MAA5D,EAAoEqU,iBAApE;AACH;AACDF,mBAAOlE,WAAP,CAAmBkE,OAAOlE,WAAP,CAAmBxa,MAAnB,GAA4B,CAA/C,EAAkD0a,IAAlD,GAAyDyB,aAAanT,CAAb,CAAzD;;AAEAyV,sBAAU7e,IAAV,CAAe8e,MAAf;AACH;AACJ,KAhBD;;AAmBA,WAAOD,SAAP;AACH,CAtCM;AAuCA,IAAMI,uBAAuB,SAAvBA,oBAAuB,CAACvC,QAAD,EAAW1S,UAAX,EAAuBiR,QAAvB,EAAiCiE,YAAjC,EAA+CvC,QAA/C,EAA4D;AAC5FD,aAAS7T,WAAT,GAAuBmB,UAAvB;AACA0S,aAASxP,qBAAT,GAAiCC,qBAAjC;AACAlC,uBACIgQ,QADJ,EAEIyB,QAFJ,EAGIvZ,yDAAcA,CAACC,MAHnB,EAIK,EAAEuH,QAAQuU,YAAV,EAJL,EAKMvC,QALN;AAOH,CAVM;;AAaA,IAAMwC,kBAAkB,SAAlBA,eAAkB,CAAClE,QAAD,EAAW0B,QAAX,EAAqBuC,YAArB,EAAmCE,WAAnC,EAAmD;AAC9E,QAAIC,eAAe,EAAnB;;AAD8E,QAGxElO,IAHwE,GAG/D+N,YAH+D,CAGxE/N,IAHwE;;;AAK9E,QAAM2N,SAAS7D,SAASnN,KAAT,CAAesR,YAAYxU,SAA3B,CAAf;AACA,QAAM0U,mBAAmB7C,aACrBqC,MADqB,EAErBnC,QAFqB,EAGrBuC,YAHqB,EAIrBjE,QAJqB,EAKrBS,wBALqB,CAAzB;AAOA,QAAML,YAAYD,cAAcjK,IAAd,EAAoBkK,SAAtC;;AAEA4D,yBAAqBH,MAArB,EAA6BQ,iBAAiBjE,UAAU,CAAV,CAAjB,CAA7B,EAA6DJ,QAA7D,EAAuEiE,YAAvE,EAAqFvC,QAArF;;AAEA,QAAItB,UAAUjb,MAAV,GAAmB,CAAvB,EAA0B;AACtBif,uBAAepE,SAASnN,KAAT,CAAesR,YAAYxU,SAA3B,CAAf;AACAqU,6BAAqBI,YAArB,EAAmCC,iBAAiBjE,UAAU,CAAV,CAAjB,CAAnC,EAAmEJ,QAAnE,EAA6EiE,YAA7E,EAA2FvC,QAA3F;AACA,eAAO,CAACmC,MAAD,EAASO,YAAT,CAAP;AACH;;AAED,WAAOP,MAAP;AACH,CAxBM;;AA0BA,IAAMS,mBAAmB,SAAnBA,gBAAmB,CAACtE,QAAD,EAAWuE,SAAX,EAAsB7U,MAAtB,EAA8B+G,SAA9B,EAA4C;AACxE,QAAMoN,SAAS7D,SAASnN,KAAT,CAAenD,OAAOC,SAAtB,CAAf;AACA,QAAI6U,gBAAgBD,SAApB;AACA,QAAI7U,OAAOwG,IAAP,KAAgBC,oDAAaA,CAACkB,OAAlC,EAA2C;AACvCmN,wBAAgB/N,UAAUsH,MAAV,CAAiB;AAAA,mBAAawG,UAAU/V,OAAV,CAAkB0C,SAAlB,MAAiC,CAAC,CAA/C;AAAA,SAAjB,CAAhB;AACH;AACD;AACA;AACA2S,WAAO/V,cAAP,GAAwB0W,cAAcpgB,IAAd,
CAAmB,GAAnB,CAAxB;AACAyf,WAAO5R,qBAAP,GAA+BC,qBAA/B;;AAEAlC,uBACIgQ,QADJ,EAEI6D,MAFJ,EAGI3b,yDAAcA,CAACE,OAHnB,EAII,EAAEmc,oBAAF,EAAa7U,cAAb,EAAqB+U,iBAAiBD,aAAtC,EAJJ,EAKI,IALJ;;AAQA,WAAOX,MAAP;AACH,CApBM;;AAuBA,IAAMhN,mBAAmB,SAAnBA,gBAAmB,CAACmJ,QAAD,EAAW0E,YAAX,EAAyBhV,MAAzB,EAAiC+G,SAAjC;AAAA,WAC5BiO,aAAa3gB,GAAb,CAAiB;AAAA,eACbugB,iBAAiBtE,QAAjB,EAA2B2E,UAA3B,EAAuCjV,MAAvC,EAA+C+G,SAA/C,CADa;AAAA,KAAjB,CAD4B;AAAA,CAAzB;;AAIA,IAAMrE,qBAAqB,SAArBA,kBAAqB,CAAC1G,UAAD,EAAgB;AAC9C;AACAA,iBAAakZ,sDAAOA,CAAC,EAAR,EAAYlZ,UAAZ,CAAb;AACA,QAAI,CAACA,WAAW5B,IAAhB,EAAsB;AAClB4B,mBAAW5B,IAAX,GAAkB0L,gDAASA,CAACC,SAA5B;AACH;;AAED,QAAI,CAAC/J,WAAWgK,OAAhB,EAAyB;AACrB,gBAAQhK,WAAW5B,IAAnB;AACA,iBAAK0L,gDAASA,CAAC4B,OAAf;AACI1L,2BAAWgK,OAAX,GAAqBsC,qDAAcA,CAACC,UAApC;AACA;AACJ;AACA,iBAAKzC,gDAASA,CAACC,SAAf;AACI/J,2BAAWgK,OAAX,GAAqBC,uDAAgBA,CAACuB,WAAtC;AACA;AAPJ;AASH;;AAED,WAAOxL,UAAP;AACH,CApBM;;AAsBA,IAAMmZ,qBAAqB,SAArBA,kBAAqB,CAACnZ,UAAD,EAAgB;AAAA,QACtC5B,IADsC,GACd4B,UADc,CACtC5B,IADsC;AAAA,QAChC4L,OADgC,GACdhK,UADc,CAChCgK,OADgC;AAAA,QACvB1R,IADuB,GACd0H,UADc,CACvB1H,IADuB;;AAE9C,QAAI8F,SAAS0L,gDAASA,CAACC,SAAnB,IAAgC3L,SAAS0L,gDAASA,CAAC4B,OAAvD,EAAgE;AAC5D,YAAI,CAACJ,qDAAaA,CAAC/M,GAAd,CAAkByL,OAAlB,CAAL,EAAiC;AAC7B,kBAAM,IAAIzK,KAAJ,uDAA6DyK,OAA7D,kBAAiF1R,IAAjF,YAAN;AACH;AACJ,KAJD,MAIO;AACH,cAAM,IAAIiH,KAAJ,4CAAkDnB,IAAlD,kBAAmE9F,IAAnE,YAAN;AACH;AACJ,CATM;;AAWA,IAAM8gB,4BAA4B,SAA5BA,yBAA4B;AAAA,WAAUta,OAAOzG,GAAP,CAAW,UAAC2H,UAAD,EAAgB;AAC1EA,qBAAa0G,mBAAmB1G,UAAnB,CAAb;AACAmZ,2BAAmBnZ,UAAnB;AACA,eAAOA,UAAP;AACH,KAJkD,CAAV;AAAA,CAAlC;;AAMA,IAAMqZ,mBAAmB,SAAnBA,gBAAmB,CAACva,MAAD,EAASwa,UAAT,EAAwB;AACpDxa,WAAO3F,OAAP,CAAe,UAAC6G,UAAD,EAAgB;AAC3B,YAAMuZ,cAAcvZ,WAAWwZ,EAA/B;AACA,YAAI,CAACD,WAAL,EAAkB;AAAE;AAAS;;AAE7B,YAAM1W,MAAMyW,WAAWxW,OAAX,CAAmB9C,WAAW1H,IAA9B,CAAZ;AACAghB,mBAAWzW,GAAX,IAAkB0W,WAAlB;AACAvZ,mBAAW1H,IAAX,GAAkBihB,WAAlB;AACA,eAAOvZ,WAAWwZ,EAAlB;AACH,KARD;AASH,CAVM;;AAYA,IAAMC,aAAa,SAAbA,UAAa,CAACC,QAAD,EAAW7a,IAAX,EAAiBC,MAAjB,EAAyBC,OAAzB,EAAqC;AAC3D
D,aAASsa,0BAA0Bta,MAA1B,CAAT;AACAC,cAAU9F,OAAOgH,MAAP,CAAchH,OAAOgH,MAAP,CAAc,EAAd,EAAkB0Z,uDAAlB,CAAd,EAAgD5a,OAAhD,CAAV;AACA,QAAMZ,YAAYM,yDAAcA,CAACD,GAAf,CAAmBO,QAAQU,UAA3B,CAAlB;;AAGA,QAAI,CAACtB,SAAL,EAAgB;AACZ,cAAM,IAAIoB,KAAJ,sCAA6CR,QAAQU,UAArD,aAAN;AACH;;AAR0D,6BAU3BtB,UAAUnD,OAAV,CAAkB6D,IAAlB,EAAwBC,MAAxB,EAAgCC,OAAhC,CAV2B;AAAA;AAAA,QAUpDiC,MAVoD;AAAA,QAU5CmE,aAV4C;;AAW3DkU,qBAAiBva,MAAjB,EAAyBkC,MAAzB;AACA,QAAMmN,WAAWvG,mEAAYA,CAACzC,aAAb,EAA4BrG,MAA5B,EAAoCkC,MAApC,CAAjB;;AAEA;AACA,QAAM4Y,YAAY3L,oDAAUA,CAACC,eAAX,CAA2BC,QAA3B,EAAqCpP,QAAQzG,IAA7C,CAAlB;AACAohB,aAAShU,kBAAT,GAA8BkU,SAA9B;;AAEA;AACAF,aAASxX,WAAT,GAAuBiD,cAAc1L,MAAd,IAAwB0L,cAAc,CAAd,EAAiB1L,MAAzC,WAAuD0L,cAAc,CAAd,EAAiB1L,MAAjB,GAA0B,CAAjF,IAAuF,EAA9G;;AAEA;AACA,QAAMogB,eAAe,EAArB;AAtB2D,QAuBnDpZ,MAvBmD,GAuBxCmZ,SAvBwC,CAuBnDnZ,MAvBmD;;AAwB3D,QAAMqZ,gBAAgBrZ,OAAOpI,GAAP,CAAW;AAAA,eAASqI,MAAM7B,IAAN,EAAT;AAAA,KAAX,CAAtB;AACA,QAAMkb,sBAAsBtZ,OAAOpI,GAAP,CAAW;AAAA,eAASqI,MAAMyE,aAAN,EAAT;AAAA,KAAX,CAA5B;AACAuC,wEAAkBA,CAACgS,SAASxX,WAA5B,EAAyC,UAAC3J,CAAD,EAAO;AAC5CshB,qBAAathB,CAAb,IAAkB0a,qBAAqBxS,MAArB,EAA6BsZ,mBAA7B,EAAkDD,aAAlD,EAAiEvhB,CAAjE,CAAlB;AACH,KAFD;AAGAqhB,cAAUhU,mBAAV,GAAgCiU,YAAhC;;AAEAH,aAAStX,cAAT,GAA2BtD,OAAOzG,GAAP,CAAW;AAAA,eAAKwL,EAAEvL,IAAP;AAAA,KAAX,CAAD,CAA0BI,IAA1B,EAA1B;AACAghB,aAAS1U,WAAT,GAAuBjG,QAAQU,UAAR,KAAuBd,iDAAUA,CAACC,IAAlC,GAAyCc,+DAAgBA,CAACb,IAAjB,CAAzC,GAAkEE,QAAQU,UAAjG;AACA,WAAOia,QAAP;AACH,CAlCM;;AAoCA,IAAMM,gBAAgB,SAAhBA,aAAgB,CAAClb,MAAD,EAAS4B,KAAT,EAAmB;AAC5C,QAAInI,IAAI,CAAR;;AAEA,WAAOA,IAAIuG,OAAOrF,MAAlB,EAA0B,EAAElB,CAA5B,EAA+B;AAC3B,YAAImI,UAAU5B,OAAOvG,CAAP,EAAUD,IAAxB,EAA8B;AAC1B,mBAAO;AACHA,sBAAMoI,KADH;AAEHtC,sBAAMU,OAAOvG,CAAP,EAAUyR,OAAV,IAAqBlL,OAAOvG,CAAP,EAAU6F,IAFlC;AAGH8I,uBAAO3O;AAHJ,aAAP;AAKH;AACJ;AACD,WAAO,IAAP;AACH,CAbM;;AAeA,IAAM0hB,yBAAyB,SAAzBA,sBAAyB,CAAC7B,UAAD,EAAgB;AAClD,QAAIjU,SAAS,EAAb;AACA,QAAI4P,kBAAJ;AACAA,gBAAYqE,WAAWlE,EAAvB;AACA,YAAQH,SAAR;AACA,aAAKvX,yDAAcA,CAACC,MAApB;AACI0H,qBAAS,CAACiU,WAAWhE,QAAZ,CAAT;A
ACA;AACJ,aAAK5X,yDAAcA,CAACE,OAApB;AACIyH,qBAAS,CAACiU,WAAWjE,IAAX,CAAgB4E,eAAjB,CAAT;AACA;AACJ,aAAKvc,yDAAcA,CAACO,IAApB;AACIoH,qBAAS,CAACiU,WAAWhE,QAAZ,CAAT;AACA;AACJ,aAAK5X,yDAAcA,CAACG,OAApB;AACIoX,wBAAY,SAAZ;AACA5P,qBAAS,CAACiU,WAAWjE,IAAX,CAAgBjQ,aAAhB,CAA8BV,KAA9B,CAAoC,GAApC,CAAD,EAA2C4U,WAAWhE,QAAtD,CAAT;AACA;AACJ;AACIL,wBAAY,IAAZ;AAfJ;;AAkBA,WAAO;AACHA,4BADG;AAEH5P;AAFG,KAAP;AAIH,CA1BM;;AA4BP,IAAM+V,gCAAgC,SAAhCA,6BAAgC,CAAC9Q,SAAD,EAAY+Q,SAAZ,EAA0B;AAC5D,QAAMC,cAAcD,UAAUE,cAAV,EAApB;AACA,QAAIC,iBAAiBlR,SAArB;;AAEAgR,gBAAYjhB,OAAZ,CAAoB,UAACif,UAAD,EAAgB;AAChC,YAAI,CAACA,UAAL,EAAiB;AACb;AACH;;AAH+B,oCAKF6B,uBAAuB7B,UAAvB,CALE;AAAA,YAKxBrE,SALwB,yBAKxBA,SALwB;AAAA,YAKb5P,MALa,yBAKbA,MALa;;AAMhC,YAAI4P,SAAJ,EAAe;AAAA;;AACXuG,6BAAiB,mCAAevG,SAAf,4CAA6B5P,MAA7B,UAAqC;AAClDF,2BAAW;AADuC,aAArC,GAAjB;AAGH;AACJ,KAXD;;AAaA,WAAOqW,cAAP;AACH,CAlBD;;AAoBA,IAAMC,mBAAmB,SAAnBA,gBAAmB,CAACnR,SAAD,EAAYoR,IAAZ,EAAqB;AAC1C,SAAK,IAAIjiB,IAAI,CAAR,EAAW+d,MAAMkE,KAAK/gB,MAA3B,EAAmClB,IAAI+d,GAAvC,EAA4C/d,GAA5C,EAAiD;AAC7C,YAAMsQ,QAAQ2R,KAAKjiB,CAAL,CAAd;AACA6Q,oBAAY8Q,8BAA8B9Q,SAA9B,EAAyCP,KAAzC,CAAZ;AACH;AACD,WAAOO,SAAP;AACH,CAND;;AAQA,IAAMqR,uBAAuB,SAAvBA,oBAAuB,CAACN,SAAD,EAAY/Q,SAAZ,EAA0D;AAAA,QAAnCpF,MAAmC,uEAA1B,EAA0B;AAAA,QAAtB0W,YAAsB,uEAAP,EAAO;;AACnF,QAAMC,gBAAgBD,aAAaC,aAAb,IAA8B,EAApD;AACA,QAAMC,YAAYF,aAAatG,QAA/B;;AAEA,QAAMyG,YAAYF,cAAclhB,MAAd,GAAuBkhB,cAAc7X,OAAd,CAAsBqX,SAAtB,MAAqC,CAAC,CAA7D,GAAiE,IAAnF;;AAEAU,iBAAaV,UAAUW,iBAAV,CAA4B1R,SAA5B,EAAuCpF,MAAvC,CAAb;;AAEA,QAAM+W,WAAWZ,UAAUa,SAA3B;AACAD,aAAS5hB,OAAT,CAAiB,UAAC8hB,KAAD,EAAW;AACxB,YAAMC,mBAAmBN,UAAUvI,MAAV,CAAiB;AAAA,mBAAOpB,IAAIkK,YAAJ,KAAqBF,KAA5B;AAAA,SAAjB,CAAzB;AACA,YAAIX,iBAAiBJ,8BAA8B9Q,SAA9B,EAAyC6R,KAAzC,CAArB;;AAEA,YAAIC,iBAAiBzhB,MAArB,EAA6B;AACzB6gB,6BAAiBtD,uBAAuBsD,cAAvB,EAAuCY,gBAAvC,EAAyD;AACtE/D,6BAAa,KADyD;AAEtEC,iCAAiB,IAFqD;AAGtEjQ,uBAAO;AAH+D,aAAzD,CAAjB;AAKH;AACDsT,6BAAqBQ,KAArB,EAA4BX,cAA5B,EAA4CtW,MAA5C,EAAoD0W,YAApD;AACH,KAZD;AAaH,CAtBD;;AAwBO,IAAMhS,sBAAsB,SAAtBA,mBAAsB,CAACG
,KAAD,EAAW;AAC1C,WAAOA,MAAMuS,OAAN,IAAiBvS,MAAMoL,WAAN,CAAkBoH,IAAlB,CAAuB;AAAA,eAAKlZ,EAAE+R,EAAF,KAAS1X,yDAAcA,CAACG,OAA7B;AAAA,KAAvB,CAAxB,EAAsF;AAClFkM,gBAAQA,MAAMuS,OAAd;AACH;AACD,WAAOvS,KAAP;AACH,CALM;;AAOA,IAAMP,mBAAmB,SAAnBA,gBAAmB,CAACO,KAAD,EAAW;AACvC,WAAOA,MAAMuS,OAAb,EAAsB;AAClBvS,gBAAQA,MAAMuS,OAAd;AACH;AACD,WAAOvS,KAAP;AACH,CALM;;AAOA,IAAMyS,qBAAqB,SAArBA,kBAAqB,CAACzS,KAAD,EAAsB;AAAA,QAAd2R,IAAc,uEAAP,EAAO;;AACpD,WAAO3R,MAAMuS,OAAb,EAAsB;AAClBZ,aAAKnhB,IAAL,CAAUwP,KAAV;AACAA,gBAAQA,MAAMuS,OAAd;AACH;AACD,WAAOZ,IAAP;AACH,CANM;;AAQA,IAAMzR,2BAA2B,SAA3BA,wBAA2B,CAACjB,WAAD,EAAca,UAAd,EAA0B4S,cAA1B,EAA0CvX,MAA1C,EAAqD;AACzF,QAAIoQ,iBAAJ;AADyF,QAEjF7L,oBAFiF,GAErCgT,cAFqC,CAEjFhT,oBAFiF;AAAA,QAE3DiT,iBAF2D,GAErCD,cAFqC,CAE3DC,iBAF2D;;AAGzF,QAAMtT,sBAAsBqT,eAAepT,QAA3C;AACA,QAAMsT,WAAW,SAAXA,QAAW,CAACC,KAAD,EAAW;AACxB,YAAMrJ,SAASrO,OAAOyX,QAAP,IAAoB;AAAA,mBAAM,IAAN;AAAA,SAAnC;AACA,eAAOpJ,OAAOqJ,KAAP,EAAc1X,MAAd,CAAP;AACH,KAHD;;AAKA,QAAM2X,kBAAkB,SAAlBA,eAAkB,QAA6B;AAAA,YAAlBC,IAAkB,SAA1B5X,MAA0B;AAAA,YAAZ6E,KAAY,SAAZA,KAAY;AAAA,YAC/BgT,IAD+B,GACtBD,IADsB,CACzCxH,QADyC;;AAEjD,YAAI+G,qBAAJ;;AAEA,YAAIU,SAAS,IAAT,IAAiBA,KAAKpb,MAAL,CAAYkW,IAAZ,CAAiB;AAAA,mBAAKxU,EAAE/D,IAAF,KAAW0L,gDAASA,CAAC4B,OAA1B;AAAA,SAAjB,CAArB,EAA0E;AACtEyP,2BAAezS,oBAAoBG,KAApB,CAAf;AACH;AACD,eAAO5P,OAAOgH,MAAP,CAAc,EAAd,EAAkB2b,IAAlB,EAAwB;AAC3BT;AAD2B,SAAxB,CAAP;AAGH,KAVD;;AAYA,QAAIP,YAAY,EAAhB;;AAEA,QAAI9S,gBAAgB,IAApB,EAA0B;AACtB8S,oBAAY,CAAC;AACTxG,sBAAU;AADD,SAAD,CAAZ;AAGAA,mBAAW,EAAX;AACH,KALD,MAKO;AAAA;;AACH,YAAI0H,kBAAkB7iB,OAAOue,MAAP,CAAcjP,qBAAqBwT,cAAnC,CAAtB;AACA,YAAIP,sBAAsB,KAA1B,EAAiC;AAC7BM,8BAAkBA,gBAAgBzJ,MAAhB,CAAuB;AAAA,uBAAKlQ,EAAE6B,MAAF,CAASmE,QAAT,KAAsBD,mBAA3B;AAAA,aAAvB,CAAlB;AACH;;AAED,YAAM8T,mBAAmBF,gBAAgBzJ,MAAhB,CAAuBoJ,QAAvB,CAAzB;;AAEA,YAAMd,gBAAgB,EAAtB;;AAEA,YAAIa,sBAAsB,KAA1B,EAAiC;AAC7B,gBAAMS,wBAAwBhjB,OAAOue,MAAP,CAAcjP,qBAAqBwT,cAAnC,CAA9B;;AAEAE,kCAAsB9iB,OAAtB,CAA8B,UAAC+iB,SAAD,EAAe;AACzC,oBAAMC,aAAaD,UAAUlY,MAA7B;AACA,oBAAImY,WAAWC,aAAX,KAA6B,
KAA7B,IAAsCD,WAAWE,MAAX,KAAsBrY,OAAOqY,MAAnE,IACIF,WAAWhU,QAAX,KAAwBD,mBADhC,EACqD;AACjDyS,kCAActhB,IAAd,CAAmB6iB,UAAUrT,KAA7B;AACAuL,+BAAW6H,sBAAsB5J,MAAtB,CAA6B;AAAA,+BAAKlQ,MAAM+Z,SAAX;AAAA,qBAA7B,EAAmD7jB,GAAnD,CAAuDsjB,eAAvD,CAAX;AACAvH,6BAAS3a,MAAT,IAAmBmhB,UAAUvhB,IAAV,CAAe;AAC9B+a,0CAD8B;AAE9BkI,gCAAQJ,UAAUrT,KAFY;AAG9B2R,8BAAMc,mBAAmBY,UAAUrT,KAA7B;AAHwB,qBAAf,CAAnB;AAKH;AACJ,aAZD;AAaH;;AAGDuL,mBAAW,aAAGtY,MAAH,2CAAiBkgB,iBAAiB3jB,GAAjB,CAAqBsjB,eAArB,CAAjB,IAAwD;AAC/DvH,sBAAUtM,WADqD;AAE/DqT,0BAAcrT,gBAAgB,IAAhB,IAAwBA,YAAYrH,MAAZ,CAAmBkW,IAAnB,CAAwB;AAAA,uBAAKxU,EAAE/D,IAAF,KAAW0L,gDAASA,CAAC4B,OAA1B;AAAA,aAAxB,CAAxB,GACVhD,oBAAoB6S,eAAevS,iBAAnC,CADU,GAC8C;AAHG,SAAxD,IAIPqJ,MAJO,CAIA;AAAA,mBAAKlQ,MAAM,IAAX;AAAA,SAJA,CAAX;AAKAyY,kBAAUvhB,IAAV,CAAe;AACX+a,8BADW;AAEXuG,qCAAmBA,aAAnB,qBAAqC3W,OAAO2W,aAAP,IAAwB,EAA7D;AAFW,SAAf;AAIH;;AAED,QAAMtS,YAAYM,WAAWE,KAA7B;;AAEA,QAAMb,aAAa/O,OAAOgH,MAAP,CAAc;AAC7Bsc,2BAAmBzU,WADU;AAE7BI;AAF6B,KAAd,EAGhBlE,MAHgB,CAAnB;;AAKA4W,cAAUzhB,OAAV,CAAkB,UAACqjB,GAAD,EAAS;AAAA,YACLX,IADK,GACIW,GADJ,CACfpI,QADe;;AAEvB,YAAMqI,mBAAmBzF,uBAAuB3O,SAAvB,EAAkCwT,IAAlC,EAAwC;AAC7DzE,6BAAiB,CAAC,CAACyE,KAAKR,IAAL,CAAU;AAAA,uBAAKlZ,EAAEgZ,YAAF,KAAmB9S,SAAxB;AAAA,aAAV;AAD0C,SAAxC,CAAzB;AAGA,YAAMmS,OAAOgC,IAAIhC,IAAjB;;AAEA,YAAIA,IAAJ,EAAU;AACN,gBAAMvC,gBAAgBsC,iBAAiBkC,gBAAjB,EAAmCjC,KAAKkC,OAAL,EAAnC,CAAtB;AACAF,gBAAIF,MAAJ,CAAWxB,iBAAX,CAA6B7C,aAA7B,EAA4CjQ,UAA5C;AACH,SAHD,MAGO;AACHyS,iCAAqBpS,SAArB,EAAgCoU,gBAAhC,EAAkDzU,UAAlD,EAA8D;AAC1D2S,+BAAe6B,IAAI7B,aADuC;AAE1DvG,0BAAUyH;AAFgD,aAA9D;AAIH;AACJ,KAhBD;AAiBH,CA5FM;;AA8FA,IAAM5S,4BAA4B,SAA5BA,yBAA4B,CAACV,oBAAD,EAAuBF,SAAvB,EAAkCkT,cAAlC,EAAqD;AAC1F,QAAMoB,mBAAmBpU,qBAAqBoU,gBAA9C;;AAEA,SAAK,IAAMN,MAAX,IAAqBM,gBAArB,EAAuC;AACnC,YAAMT,YAAYS,iBAAiBN,MAAjB,CAAlB;AACA,YAAMF,aAAaD,UAAUlY,MAA7B;AACA,YAAMkE,sBAAsBqT,eAAevX,MAAf,CAAsBmE,QAAlD;AACA,YAAMyU,wBAAwBrB,eAAevT,UAAf,CAA0B4U,qBAA1B,GAC1BrB,eAAevT,UAAf,CAA0B4U,qBAA1B,CAAgDT,UAAhD,EAA4DZ,eAAevX,MAA3E,CAD0B,GAC2D,IADzF;AAEA,YAAImY,WAAWhU,QA
AX,KAAwBD,mBAAxB,IAA+C0U,qBAAnD,EAA0E;AACtE,gBAAMC,gBAAgBV,WAAW/H,QAAjC;AACArL,qCAAyB8T,aAAzB,EAAwC;AACpChU,uBAAOR,SAD6B;AAEpCO,8BAAcF,oBAAoBwT,UAAUrT,KAA9B;AAFsB,aAAxC,EAGG;AACCN,0DADD;AAECiT,mCAAmB,KAFpB;AAGCrT,0BAAUD,mBAHX;AAICc,mCAAmBkT,UAAUrT;AAJ9B,aAHH,EAQGsT,UARH;AASH;AACJ;AACJ,CAtBM;;AAwBA,IAAMrT,qBAAqB,SAArBA,kBAAqB,CAACP,oBAAD,EAA8C;AAAA,QAAvBvE,MAAuB,uEAAd,EAAc;AAAA,QAAV6E,KAAU;;AAC5E,QAAIiU,wBAAJ;AACA,QAAM7U,kBAAkBjE,OAAOiE,eAA/B;AACA,QAAMmM,WAAWpQ,OAAOoQ,QAAxB;AACA,QAAM3d,MAASuN,OAAOqY,MAAhB,SAA0BrY,OAAOmE,QAAvC;;AAEA,QAAIF,eAAJ,EAAqB;AACjB6U,0BAAkBvU,qBAAqBwT,cAAvC;AACH,KAFD,MAEO;AACHe,0BAAkBvU,qBAAqBoU,gBAAvC;AACH;;AAED,QAAIvI,aAAa,IAAjB,EAAuB;AACnB,eAAO0I,gBAAgBrmB,GAAhB,CAAP;AACH,KAFD,MAEO;AACHqmB,wBAAgBrmB,GAAhB,IAAuB;AACnBoS,wBADmB;AAEnB7E;AAFmB,SAAvB;AAIH;;AAED,WAAO,KAAP;AACH,CAtBM;;AAyBA,IAAMkH,yBAAyB,SAAzBA,sBAAyB,CAAC2N,SAAD,EAAY9N,SAAZ,EAAuBD,WAAvB,EAAuC;AACzE,QAAMiS,sBAAsBlE,UAAUvY,MAAV,CAAiB,UAACC,GAAD,EAAMG,KAAN,EAAgB;AACzD,YAAIA,MAAMqE,WAAN,CAAkBzM,IAAlB,KAA2B,QAA/B,EAAyC;AACrCiI,gBAAIlH,IAAJ,+BAAY0R,UAAUsH,MAAV,CAAiB;AAAA,uBAAa7M,UAAUwX,MAAV,CAAiBtc,KAAjB,MAA4B,CAAC,CAA1C;AAAA,aAAjB,CAAZ;AACH,SAFD,MAEO,IAAIA,SAASoK,WAAb,EAA0B;AAC7BvK,gBAAIlH,IAAJ,CAASqH,KAAT;AACH;AACD,eAAOH,GAAP;AACH,KAP2B,EAOzB,EAPyB,CAA5B;AAQA,WAAO7G,MAAMujB,IAAN,CAAW,IAAIjO,GAAJ,CAAQ+N,mBAAR,CAAX,EAAyC1kB,GAAzC,CAA6C;AAAA,eAASqI,MAAM/J,IAAN,EAAT;AAAA,KAA7C,CAAP;AACH,CAVM;;AAYP;;;;;;;AAOO,IAAMumB,wBAAwB,SAAxBA,qBAAwB,CAACxc,KAAD,EAAQhK,KAAR,EAAkB;AACnD,QAAIgK,MAAMqQ,YAAV,EAAwB;AACpB,eAAOrQ,MAAMqQ,YAAN,GAAqBra,KAArB,CAAP;AACH;AACD,WAAOA,KAAP;AACH,CALM,C;;;;;;;;;;;ACvuBP,IAAM2K,YAAY8b,mBAAOA,CAAC,iCAAR,CAAlB;;AAEAC,OAAOC,OAAP,GAAiBhc,UAAUic,OAAV,GAAoBjc,UAAUic,OAA9B,GAAwCjc,SAAzD,C;;;;;;;;;;;;;;;;;ACFA;;;;;;IAMM+J,iB;;;;AACF;;;;;;;yCAOyBpH,M,EAAQ;AAC7B,gBAAI,CAACA,MAAL,EAAa;AACT,uBAAOoH,kBAAkBmS,oBAAzB;AACH;AACD,mBAAOtkB,OAAOgH,MAAP,CAAcmL,kBAAkBmS,oBAAhC,EAAsDvZ,MAAtD,CAAP;AACH;;AAED;;;;;;;;;AAMA,+BAAatN,KAAb,EAAoB;AAAA;;AAChB,aAAK8mB,MAAL,GAAc9mB,KAAd;AACH;;AAED;;;;;;;
;;;gCAMS;AACL,mBAAO,KAAK8mB,MAAZ;AACH;;AAED;;;;;;;;;mCAMY;AACR,mBAAOrM,OAAO,KAAKqM,MAAZ,CAAP;AACH;;;kCAEgBvM,G,EAAK;AAClB,mBAAQA,eAAe7F,iBAAhB,IAAsC,CAAC,CAACA,kBAAkBC,gBAAlB,GAAqC4F,GAArC,CAA/C;AACH;;;uCAEqBA,G,EAAK;AACvB,mBAAOA,eAAe7F,iBAAf,GAAmC6F,GAAnC,GAAyC7F,kBAAkBC,gBAAlB,GAAqC4F,GAArC,CAAhD;AACH;;;;;;AAGL;;;;;AAGA7F,kBAAkBqS,IAAlB,GAAyB,IAAIrS,iBAAJ,CAAsB,MAAtB,CAAzB;AACAA,kBAAkBqG,EAAlB,GAAuB,IAAIrG,iBAAJ,CAAsB,IAAtB,CAAvB;AACAA,kBAAkBsS,GAAlB,GAAwB,IAAItS,iBAAJ,CAAsB,KAAtB,CAAxB;;AAEA;;;;;AAKAA,kBAAkBmS,oBAAlB,GAAyC;AACrCI,aAASvS,kBAAkBqG,EADU;AAErCmM,SAAKxS,kBAAkBsS,GAFc;AAGrCG,UAAMzS,kBAAkBqS,IAHa;AAIrCra,eAAWgI,kBAAkBqG;AAJQ,CAAzC;;AAOerG,gFAAf,E;;;;;;;;;;;;;;;;;;AC/EA;AACA;;AAEA,IAAM0S,kBAAkB,SAAlBA,eAAkB,CAACC,OAAD,EAAUra,KAAV,EAAiBC,GAAjB,EAAyB;AAC7C,QAAMqa,UAAU,EAAhB;AACA,QAAIpb,OAAOc,KAAX;;AAEA,WAAOd,OAAOe,GAAd,EAAmB;AACfqa,gBAAQ3kB,IAAR,CAAauJ,IAAb;AACAA,gBAAQmb,OAAR;AACH;AACDC,YAAQ3kB,IAAR,CAAauJ,IAAb;;AAEA,WAAOob,OAAP;AACH,CAXD;;AAaA,IAAMC,kBAAkB,SAAlBA,eAAkB,CAACC,YAAD,EAAexnB,KAAf,EAAyB;AAC7C,QAAIynB,UAAU,CAAd;AACA,QAAIC,WAAWF,aAAazkB,MAAb,GAAsB,CAArC;AACA,QAAI4kB,eAAJ;AACA,QAAI9G,cAAJ;;AAEA;AACA,WAAO4G,WAAWC,QAAlB,EAA4B;AACxBC,iBAASF,UAAUtL,KAAKyL,KAAL,CAAW,CAACF,WAAWD,OAAZ,IAAuB,CAAlC,CAAnB;AACA5G,gBAAQ2G,aAAaG,MAAb,CAAR;;AAEA,YAAI3nB,SAAS6gB,MAAM7T,KAAf,IAAwBhN,QAAQ6gB,MAAM5T,GAA1C,EAA+C;AAC3C,mBAAO4T,KAAP;AACH,SAFD,MAEO,IAAI7gB,SAAS6gB,MAAM5T,GAAnB,EAAwB;AAC3Bwa,sBAAUE,SAAS,CAAnB;AACH,SAFM,MAEA,IAAI3nB,QAAQ6gB,MAAM7T,KAAlB,EAAyB;AAC5B0a,uBAAWC,SAAS,CAApB;AACH;AACJ;;AAED,WAAO,IAAP;AACH,CArBD;;AAuBC;;;;;;;;AAQM,SAAS3U,qBAAT,CAAgCD,YAAhC,EAA8CpG,UAA9C,EAA0DW,MAA1D,EAAkE;AAAA,QAC/Dga,OAD+D,GACnBha,MADmB,CAC/Dga,OAD+D;AAAA,QACtDO,SADsD,GACnBva,MADmB,CACtDua,SADsD;AAAA,QAC3CR,OAD2C,GACnB/Z,MADmB,CAC3C+Z,OAD2C;AAAA,QAClCra,KADkC,GACnBM,MADmB,CAClCN,KADkC;AAAA,QAC3BC,GAD2B,GACnBK,MADmB,CAC3BL,GAD2B;;AAAA,+BAEhD8F,aAAawF,MAAb,EAFgD;AAAA;AAAA,QAE9DuP,IAF8D;AAAA,QAExDC,IAFwD;;AAIrE,QAAI,CAACT,OAAL,EAAc;AACVta,gBAASA,UAAU,CAAV,KAAgB,CAACA,KAAD,IAAUA,Q
AAQ8a,IAAlC,CAAD,GAA4CA,IAA5C,GAAmD9a,KAA3D;AACAC,cAAOA,QAAQ,CAAR,KAAc,CAACA,GAAD,IAAQA,MAAM8a,IAA5B,CAAD,GAAuCA,OAAO,CAA9C,GAAmD9a,GAAzD;;AAEA,YAAI4a,SAAJ,EAAe;AACXR,sBAAUlL,KAAK6L,IAAL,CAAU7L,KAAK8L,GAAL,CAAShb,MAAMD,KAAf,IAAwB6a,SAAlC,CAAV;AACH;;AAEDP,kBAAUF,gBAAgBC,OAAhB,EAAyBra,KAAzB,EAAgCC,GAAhC,CAAV;AACH;;AAED,QAAIqa,QAAQ,CAAR,IAAaQ,IAAjB,EAAuB;AACnBR,gBAAQY,OAAR,CAAgBJ,IAAhB;AACH;AACD,QAAIR,QAAQA,QAAQvkB,MAAR,GAAiB,CAAzB,KAA+BglB,IAAnC,EAAyC;AACrCT,gBAAQ3kB,IAAR,CAAaolB,OAAO,CAApB;AACH;;AAED,QAAMP,eAAe,EAArB;AACA,SAAK,IAAI3lB,IAAI,CAAb,EAAgBA,IAAIylB,QAAQvkB,MAAR,GAAiB,CAArC,EAAwClB,GAAxC,EAA6C;AACzC2lB,qBAAa7kB,IAAb,CAAkB;AACdqK,mBAAOsa,QAAQzlB,CAAR,CADO;AAEdoL,iBAAKqa,QAAQzlB,IAAI,CAAZ;AAFS,SAAlB;AAIH;;AAED,QAAMoR,aAAa,EAAnB;AACAjC,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClC,YAAM2K,QAAQuG,aAAa5D,YAAb,CAA0BhH,IAA1B,CAA+BtG,CAA/B,CAAd;AACA,YAAI2K,iBAAiBkI,4DAArB,EAAwC;AACpCzB,uBAAWtQ,IAAX,CAAgB6J,KAAhB;AACA;AACH;;AAED,YAAMqU,QAAQ0G,gBAAgBC,YAAhB,EAA8Bhb,KAA9B,CAAd;AACAyG,mBAAWtQ,IAAX,CAAmBke,MAAM7T,KAAzB,SAAkC6T,MAAM5T,GAAxC;AACH,KATD;;AAWA,WAAO,EAAEgG,sBAAF,EAAcC,MAAMoU,OAApB,EAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;AC1FD;AACA;;AAEA;;;;;;;;;;;;;;AAcA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0CO,IAAMrR,SAAS,SAATA,MAAS;AAAA,sCAAIrL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAMud,GAAGlS,MAAH,WAAarL,IAAb,CAAN;AAAA,KAAb;AAAA,CAAf;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8BO,IAAMsL,UAAU,SAAVA,OAAU;AAAA,uCAAItL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAMud,GAAGjS,OAAH,WAActL,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyDO,IAAMoL,MAAM,SAANA,GAAM;AAAA,uCAAIpL,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAMud,GAAGnS,GAAH,WAAUpL,IAAV,CAAN;AAAA,KAAb;AAAA,CAAZ;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;AA2BO,IAAM+C,UAAU,SAAVA,OAAU;AAAA,uCAAI/C,IAAJ;AAAIA,YAAJ;AAAA;;AAAA,WAAa;AAAA,eAAMud,GAAGxa,OAAH,WAAc/C,IAAd,CAAN;AAAA,KAAb;AAAA,CAAhB;;AAEP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCO,IAAMmL,UAAU,SAAVA,OAAU;AAAA,uCAAIqS,UAAJ;AAAIA,kBAAJ;AAAA;;AAAA,WAC
nB,UAACD,EAAD,EAAsC;AAAA,YAAjC7a,MAAiC,uEAAxB,EAAEC,WAAW,IAAb,EAAwB;;AAClC,YAAI8a,YAAYF,EAAhB;AACA,YAAIG,mBAAJ;AACA,YAAM5E,cAAc,EAApB;;AAEA0E,mBAAW3lB,OAAX,CAAmB,UAAC4a,SAAD,EAAe;AAC9BgL,wBAAYhL,UAAUgL,SAAV,CAAZ;AACA3E,wBAAY/gB,IAAZ,uCAAoB0lB,UAAU9K,WAA9B;AACA,gBAAI,CAAC+K,UAAL,EAAiB;AACbA,6BAAaD,SAAb;AACH;AACJ,SAND;;AAQA,YAAIC,cAAcA,eAAeD,SAAjC,EAA4C;AACxCC,uBAAWC,OAAX;AACH;;AAED;AACAF,kBAAUvK,mBAAV,GAAgC,EAAhC;AACAlQ,0EAAkBA,CACdua,EADJ,EAEIE,SAFJ,EAGIviB,yDAAcA,CAACI,OAHnB,EAII,IAJJ,EAKIwd,WALJ;;AAQA,YAAIpW,OAAOC,SAAX,EAAsB;AAClB8a,sBAAUta,SAAV,CAAoBoa,EAApB;AACH,SAFD,MAEO;AACHE,sBAAUta,SAAV,CAAoB,IAApB;AACH;;AAED,eAAOsa,SAAP;AACH,KAnCkB;AAAA,CAAhB,C;;;;;;;;;;;;ACvNP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;AAKA,SAASG,eAAT,GAA2B;AAAE,WAAO,IAAP;AAAc;;AAE3C;;;;;;;;;;AAUO,SAASpS,YAAT,CAAuBqS,GAAvB,EAA4BC,GAA5B,EAAiC3D,QAAjC,EAAgG;AAAA,QAArD4D,mBAAqD,uEAA/B,KAA+B;AAAA,QAAxBC,QAAwB,uEAAbtiB,gDAAKA,CAACC,KAAO;;AACnG,QAAM6B,SAAS,EAAf;AACA,QAAMD,OAAO,EAAb;AACA,QAAM0gB,qBAAqB9D,YAAYyD,eAAvC;AACA,QAAMM,gBAAgBL,IAAIla,aAAJ,EAAtB;AACA,QAAMwa,gBAAgBL,IAAIna,aAAJ,EAAtB;AACA,QAAMya,oBAAoBF,cAAclnB,IAAxC;AACA,QAAMqnB,oBAAoBF,cAAcnnB,IAAxC;AACA,QAAMA,OAAUknB,cAAclnB,IAAxB,SAAgCmnB,cAAcnnB,IAApD;AACA,QAAMsnB,mBAAmBC,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAzB;;AAEA,QAAIC,sBAAsBC,iBAA1B,EAA6C;AACzC,cAAM,IAAIpgB,KAAJ,CAAU,4CAAV,CAAN;AACH;AACD;AACAigB,kBAAc/e,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAMof,YAAY5G,sDAAOA,CAAC,EAAR,EAAYxY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAI8gB,iBAAiB9c,OAAjB,CAAyBgd,UAAUxnB,IAAnC,MAA6C,CAAC,CAA9C,IAAmD,CAAC+mB,mBAAxD,EAA6E;AACzES,sBAAUxnB,IAAV,GAAoBknB,cAAclnB,IAAlC,SAA0CwnB,UAAUxnB,IAApD;AACH;AACDwG,eAAOzF,IAAP,CAAYymB,SAAZ;AACH,KAND;AAOAL,kBAAchf,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,YAAMof,YAAY5G,sDAAOA,CAAC,EAAR,EAAYxY,MAAM5B,MAAN,EAAZ,CAAlB;AACA,YAAI8gB,iBAAiB9c,OAAjB,CAAyBgd,UAAUxnB,IAAnC,MAA6C,CAAC,CAAlD,EAAqD;AACjD,gBAAI,CAAC+mB,mBAAL,EAA0B;AACtBS,0BAAUxnB,IAAV,GAAoBmnB,cAAcnnB,IAAlC,SAA0CwnB,UAAUxnB,IAA
pD;AACAwG,uBAAOzF,IAAP,CAAYymB,SAAZ;AACH;AACJ,SALD,MAKO;AACHhhB,mBAAOzF,IAAP,CAAYymB,SAAZ;AACH;AACJ,KAVD;;AAYA;AACApY,oFAAkBA,CAACyX,IAAIjd,WAAvB,EAAoC,UAAC3J,CAAD,EAAO;AACvC,YAAIwnB,WAAW,KAAf;AACA,YAAIC,oBAAJ;AACAtY,wFAAkBA,CAAC0X,IAAIld,WAAvB,EAAoC,UAAC+d,EAAD,EAAQ;AACxC,gBAAMC,QAAQ,EAAd;AACA,gBAAMC,UAAU,EAAhB;AACAA,oBAAQT,iBAAR,IAA6B,EAA7B;AACAS,oBAAQR,iBAAR,IAA6B,EAA7B;AACAH,0BAAc/e,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpCwf,sBAAM7mB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAAX;AACA4nB,wBAAQT,iBAAR,EAA2Bhf,MAAMpI,IAAN,EAA3B,IAA2C;AACvCgb,8BAAU5S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAD6B;AAEvC8a,oCAAgB3S,MAAMyE,aAAN,GAAsB5M,CAAtB;AAFuB,iBAA3C;AAIH,aAND;AAOAknB,0BAAchf,MAAd,CAAqBtH,OAArB,CAA6B,UAACuH,KAAD,EAAW;AACpC,oBAAI,EAAEkf,iBAAiB9c,OAAjB,CAAyBpC,MAAM5B,MAAN,GAAexG,IAAxC,MAAkD,CAAC,CAAnD,IAAwD+mB,mBAA1D,CAAJ,EAAoF;AAChFa,0BAAM7mB,IAAN,CAAWqH,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBohB,EAAxB,CAAX;AACH;AACDE,wBAAQR,iBAAR,EAA2Bjf,MAAMpI,IAAN,EAA3B,IAA2C;AACvCgb,8BAAU5S,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBohB,EAAxB,CAD6B;AAEvC5M,oCAAgB3S,MAAMyE,aAAN,GAAsB8a,EAAtB;AAFuB,iBAA3C;AAIH,aARD;;AAUA,gBAAI3Y,cAAc,EAAlB;AACA,gBAAI8Y,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAI3X,YAAJ,EAAN;AAAA,aAArB;AACA,gBAAI6Y,iBAAiB,SAAjBA,cAAiB;AAAA,uBAAMjB,IAAI5X,YAAJ,EAAN;AAAA,aAArB;;AAEA,gBAAM8Y,YAAYlN,+DAAeA,CAAC+M,QAAQT,iBAAR,CAAhB,CAAlB;AACA,gBAAMa,YAAYnN,+DAAeA,CAAC+M,QAAQR,iBAAR,CAAhB,CAAlB;AACA,gBAAIJ,mBAAmBe,SAAnB,EAA8BC,SAA9B,EAAyCH,cAAzC,EAAyDC,cAAzD,EAAyE/Y,WAAzE,CAAJ,EAA2F;AACvF,oBAAMkZ,WAAW,EAAjB;AACAN,sBAAM/mB,OAAN,CAAc,UAACsnB,OAAD,EAAUC,GAAV,EAAkB;AAC5BF,6BAAS1hB,OAAO4hB,GAAP,EAAYpoB,IAArB,IAA6BmoB,OAA7B;AACH,iBAFD;AAGA,oBAAIV,YAAY/iB,gDAAKA,CAACC,KAAN,KAAgBqiB,QAAhC,EAA0C;AACtCzgB,yBAAKmhB,WAAL,IAAoBQ,QAApB;AACH,iBAFD,MAGK;AACD3hB,yBAAKxF,IAAL,CAAUmnB,QAAV;AACAT,+BAAW,IAAX;AACAC,kCAAcznB,CAAd;AACH;AACJ,aAbD,MAaO,IAAI,CAAC+mB,aAAatiB,gDAAKA,CAACE,SAAnB,IAAgCoiB,aAAatiB,gDAAKA,CAACG,UAApD,KAAmE,CAAC4iB,QAAxE,EAAkF;AACrF,oBAAMS,YAAW,EAAjB;AACA,oBAAIlK,MAAMkJ,cAAc/e,MA
Ad,CAAqBhH,MAArB,GAA8B,CAAxC;AACAymB,sBAAM/mB,OAAN,CAAc,UAACsnB,OAAD,EAAUC,GAAV,EAAkB;AAC5B,wBAAIA,OAAOpK,GAAX,EAAgB;AACZkK,kCAAS1hB,OAAO4hB,GAAP,EAAYpoB,IAArB,IAA6BmoB,OAA7B;AACH,qBAFD,MAGK;AACDD,kCAAS1hB,OAAO4hB,GAAP,EAAYpoB,IAArB,IAA6B,IAA7B;AACH;AACJ,iBAPD;AAQAynB,2BAAW,IAAX;AACAC,8BAAcznB,CAAd;AACAsG,qBAAKxF,IAAL,CAAUmnB,SAAV;AACH;AACJ,SAxDD;AAyDH,KA5DD;;AA8DA,WAAO,IAAInf,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;;;ACzHD;AACA;AACA;AACA;;AAEA;;;;;;;;;;;AAWO,SAAS0J,WAAT,CAAsBiM,UAAtB,EAAkC5K,UAAlC,EAA8CoQ,aAA9C,EAA6D/O,cAA7D,EAA6E3F,OAA7E,EAAsF;AACzF,QAAMyC,aAAa;AACfc,gBAAQ,KADO;AAEfD,oBAAY;AAFG,KAAnB;AAIAtD,cAAU9F,OAAOgH,MAAP,CAAc,EAAd,EAAkBuB,UAAlB,EAA8BzC,OAA9B,CAAV;;AAEA,QAAM4hB,SAAS;AACX7hB,gBAAQ,EADG;AAEXD,cAAM,EAFK;AAGX0D,cAAM;AAHK,KAAf;AAKA,QAAMD,SAASvD,QAAQuD,MAAvB;AACA,QAAMse,aAAalc,kBAAkBA,eAAejL,MAAf,GAAwB,CAA7D;AACA;AACA,QAAMonB,aAAa,EAAnB;AACA;AACA,QAAMC,UAAUrN,cAAcjQ,KAAd,CAAoB,GAApB,CAAhB;;AAEAsd,YAAQ3nB,OAAR,CAAgB,UAAC4nB,OAAD,EAAa;AACzB,aAAK,IAAIxoB,IAAI,CAAb,EAAgBA,IAAI0V,WAAWxU,MAA/B,EAAuClB,KAAK,CAA5C,EAA+C;AAC3C,gBAAI0V,WAAW1V,CAAX,EAAcD,IAAd,OAAyByoB,OAA7B,EAAsC;AAClCF,2BAAWxnB,IAAX,CAAgB4U,WAAW1V,CAAX,CAAhB;AACA;AACH;AACJ;AACJ,KAPD;;AASA;AACAsoB,eAAW1nB,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B;AACAigB,eAAO7hB,MAAP,CAAczF,IAAd,CAAmBqH,MAAM5B,MAAN,EAAnB;AACH,KAHD;;AAKA,QAAIwD,MAAJ,EAAY;AACRqe,eAAO7hB,MAAP,CAAczF,IAAd,CAAmB;AACff,kBAAMiE,iDADS;AAEf6B,kBAAM0L,gDAASA,CAACC;AAFD,SAAnB;AAIH;;AAEDrC,oFAAkBA,CAACrE,UAAnB,EAA+B,UAAC9K,CAAD,EAAO;AAClCooB,eAAO9hB,IAAP,CAAYxF,IAAZ,CAAiB,EAAjB;AACA,YAAM2nB,YAAYL,OAAO9hB,IAAP,CAAYpF,MAAZ,GAAqB,CAAvC;AACA,YAAIiK,QAAQ,CAAZ;AACAmd,mBAAW1nB,OAAX,CAAmB,UAACuH,KAAD,EAAQuf,EAAR,EAAe;AAC9BU,mBAAO9hB,IAAP,CAAYmiB,SAAZ,EAAuBf,KAAKvc,KAA5B,IAAqChD,MAAMmF,YAAN,CAAmBhH,IAAnB,CAAwBtG,CAAxB,CAArC;AACH,SAFD;AAGA,YAAI+J,MAAJ,EAAY;AACRqe,mBAAO9hB,IAAP,CAAYmiB,SAAZ,EAAuBH,WAAWpnB,MAAlC,IAA4ClB,CAA5C;AACH;AACD;AACAooB,eAAOpe,IAAP,CAAYlJ,IAAZ,CAAiBd,CAAjB;;AAEA;AACA;AACA,YAAIqoB,UAAJ,EAAgB;
AAAED,mBAAO9hB,IAAP,CAAYmiB,SAAZ,EAAuB3nB,IAAvB,CAA4Bd,CAA5B;AAAiC;AACtD,KAhBD;;AAkBA;AACA,QAAIqoB,UAAJ,EAAgB;AACZK,8DAAQA,CAACN,MAAT,EAAiBjc,cAAjB;AACH;;AAED,QAAI3F,QAAQsD,UAAZ,EAAwB;AACpB,YAAM6e,UAAUxnB,0CAASA,MAAMinB,OAAO7hB,MAAP,CAAcrF,MAApB,CAAT,GAAsCpB,GAAtC,CAA0C;AAAA,mBAAM,EAAN;AAAA,SAA1C,CAAhB;AACAsoB,eAAO9hB,IAAP,CAAY1F,OAAZ,CAAoB,UAAC+mB,KAAD,EAAW;AAC3BA,kBAAM/mB,OAAN,CAAc,UAAC0F,IAAD,EAAOtG,CAAP,EAAa;AACvB2oB,wBAAQ3oB,CAAR,EAAWc,IAAX,CAAgBwF,IAAhB;AACH,aAFD;AAGH,SAJD;AAKA8hB,eAAO9hB,IAAP,GAAcqiB,OAAd;AACH;;AAED,WAAOP,MAAP;AACH,C;;;;;;;;;;;;AC3FD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;AAEA;;;;;;;;;AASO,SAAS5T,UAAT,CAAqBoS,GAArB,EAA0BC,GAA1B,EAA+B;AAClC,QAAM+B,YAAY,EAAlB;AACA,QAAMriB,SAAS,EAAf;AACA,QAAMsiB,gBAAgB,EAAtB;AACA,QAAMviB,OAAO,EAAb;AACA,QAAM2gB,gBAAgBL,IAAIla,aAAJ,EAAtB;AACA,QAAMwa,gBAAgBL,IAAIna,aAAJ,EAAtB;AACA,QAAMoc,wBAAwB7B,cAAc1Z,SAAd,EAA9B;AACA,QAAMwb,wBAAwB7B,cAAc3Z,SAAd,EAA9B;AACA,QAAMxN,OAAUknB,cAAclnB,IAAxB,eAAsCmnB,cAAcnnB,IAA1D;;AAED;AACC,QAAI,CAACipB,gEAAUA,CAACpC,IAAI/c,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiDud,IAAIhd,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACCsd,QAAI/c,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQ2gB,sBAAsB7b,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAY6f,sDAAOA,CAAC,EAAR,EAAYxY,MAAM5B,MAAN,EAAZ,CAAZ;AACAsiB,sBAAc/nB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;;AAOA,aAASkpB,iBAAT,CAA2B3C,EAA3B,EAA+B/Y,SAA/B,EAA0C2b,OAA1C,EAAmD;AAC/C/Z,wFAAkBA,CAACmX,GAAG3c,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAM2nB,QAAQ,EAAd;AACA,gBAAIwB,WAAW,EAAf;AACAN,0BAAcjoB,OAAd,CAAsB,UAACwoB,UAAD,EAAgB;AAClC,oBAAMjrB,QAAQoP,UAAU6b,UAAV,EAAsB9b,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACAmpB,kCAAgBhrB,KAAhB;AACAwpB,sBAAMyB,UAAN,IAAoBjrB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACyqB,UAAUO,QAAV,CAAL,EAA0B;AACtB,oBAAID,OAAJ,EAAa;AAAE5iB,yBAAKxF,IAAL,CAAU6mB,KAAV;AAAmB;AAClCiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAk
BpC,GAAlB,EAAuBkC,qBAAvB,EAA8C,KAA9C;AACAE,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB,EAA8C,IAA9C;;AAEA,WAAO,IAAIhgB,kDAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;ACjED;AAAA;AAAA;;;;;;;;AAQO,SAASunB,eAAT,CAA0B+B,GAA1B,EAA+BC,GAA/B,EAAoC;AACvC,QAAMC,SAAS,EAAf;AACA,QAAMC,SAAS,EAAf;AACAH,QAAInhB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1BqhB,eAAO1oB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH,KAFD;AAGAupB,QAAIphB,MAAJ,CAAWtH,OAAX,CAAmB,UAACuH,KAAD,EAAW;AAC1B,YAAIqhB,OAAOjf,OAAP,CAAepC,MAAM5B,MAAN,GAAexG,IAA9B,MAAwC,CAAC,CAA7C,EAAgD;AAC5CwpB,mBAAOzoB,IAAP,CAAYqH,MAAM5B,MAAN,GAAexG,IAA3B;AACH;AACJ,KAJD;AAKA,WAAOwpB,MAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;;;;;ACpBD;AACA;AACA;;IAEQhW,G,GAAgDD,yD,CAAhDC,G;IAAKC,G,GAA2CF,yD,CAA3CE,G;IAAKG,K,GAAsCL,yD,CAAtCK,K;IAAOC,I,GAA+BN,yD,CAA/BM,I;IAAMC,K,GAAyBP,yD,CAAzBO,K;IAAOC,G,GAAkBR,yD,CAAlBQ,G;IAAKL,G,GAAaH,yD,CAAbG,G;IAAKC,G,GAAQJ,yD,CAARI,G;;;AAEhD,SAAS+V,iBAAT,CAA2BriB,GAA3B,EAAgC;AAC5B,WAAOA,IAAI0S,MAAJ,CAAW;AAAA,eAAQ,EAAElR,gBAAgBiK,4DAAlB,CAAR;AAAA,KAAX,CAAP;AACH;AACD;;;;;;;AAOA,SAAS6W,GAAT,CAActiB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMwoB,iBAAiBF,kBAAkBriB,GAAlB,CAAvB;AACA,YAAMwiB,WAAWD,eAAezoB,MAAf,GACGyoB,eAAe5hB,MAAf,CAAsB,UAACC,GAAD,EAAM6hB,IAAN;AAAA,mBAAe7hB,MAAM6hB,IAArB;AAAA,SAAtB,EAAiD,CAAjD,CADH,GAEKhX,4DAAiBA,CAACqS,IAFxC;AAGA,eAAO0E,QAAP;AACH;AACD,WAAO/W,4DAAiBA,CAACqS,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS4E,GAAT,CAAc1iB,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C,YAAMyoB,WAAWF,IAAItiB,GAAJ,CAAjB;AACA,YAAM2W,MAAM3W,IAAIlG,MAAJ,IAAc,CAA1B;AACA,eAAQgK,OAAO3M,KAAP,CAAaqrB,QAAb,KAA0BA,oBAAoB/W,4DAA/C,GACEA,4DAAiBA,CAACqS,IADpB,GAC2B0E,WAAW7L,GAD7C;AAEH;AACD,WAAOlL,4DAAiBA,CAACqS,IAAzB;AACH;;AAED;;;;;;;AAOA,SAASjN,GAAT,CAAc7Q,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAM4oB,iBAAiBN,kBAAkBriB,GAAlB,CAAvB;;AAEA,eAAQ2iB,eAAe7oB,MAAhB,GAA0BoZ,KAAKrC,
GAAL,gCAAY8R,cAAZ,EAA1B,GAAwDlX,4DAAiBA,CAACqS,IAAjF;AACH;AACD,WAAOrS,4DAAiBA,CAACqS,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS/M,GAAT,CAAc/Q,GAAd,EAAmB;AACf,QAAIC,sDAAOA,CAACD,GAAR,KAAgB,EAAEA,IAAI,CAAJ,aAAkBjG,KAApB,CAApB,EAAgD;AAC5C;AACA,YAAM4oB,iBAAiBN,kBAAkBriB,GAAlB,CAAvB;;AAEA,eAAQ2iB,eAAe7oB,MAAhB,GAA0BoZ,KAAKnC,GAAL,gCAAY4R,cAAZ,EAA1B,GAAwDlX,4DAAiBA,CAACqS,IAAjF;AACH;AACD,WAAOrS,4DAAiBA,CAACqS,IAAzB;AACH;;AAED;;;;;;;AAOA,SAAS8E,KAAT,CAAgB5iB,GAAhB,EAAqB;AACjB,WAAOA,IAAI,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAAS6iB,IAAT,CAAe7iB,GAAf,EAAoB;AAChB,WAAOA,IAAIA,IAAIlG,MAAJ,GAAa,CAAjB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASgpB,KAAT,CAAgB9iB,GAAhB,EAAqB;AACjB,QAAIC,sDAAOA,CAACD,GAAR,CAAJ,EAAkB;AACd,eAAOA,IAAIlG,MAAX;AACH;AACD,WAAO2R,4DAAiBA,CAACqS,IAAzB;AACH;;AAED;;;;;;AAMA,SAASiF,QAAT,CAAmB/iB,GAAnB,EAAwB;AACpB,QAAIgjB,OAAON,IAAI1iB,GAAJ,CAAX;AACA,WAAO0iB,IAAI1iB,IAAItH,GAAJ,CAAQ;AAAA,wBAAQuqB,MAAMD,IAAd,EAAuB,CAAvB;AAAA,KAAR,CAAJ,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,GAAT,CAAcljB,GAAd,EAAmB;AACf,WAAOkT,KAAKiQ,IAAL,CAAUJ,SAAS/iB,GAAT,CAAV,CAAP;AACH;;AAGD,IAAMojB,iDACDjX,GADC,EACKmW,GADL,4BAEDlW,GAFC,EAEKsW,GAFL,4BAGDrW,GAHC,EAGKwE,GAHL,4BAIDvE,GAJC,EAIKyE,GAJL,4BAKDxE,KALC,EAKOqW,KALP,4BAMDpW,IANC,EAMMqW,IANN,4BAODpW,KAPC,EAOOqW,KAPP,4BAQDpW,GARC,EAQKwW,GARL,WAAN;;AAWA,IAAM/R,qBAAqBhF,GAA3B;;;;;;;;;;;;;;;;;;;;;;;;;;ACpJA;AACA;AACA;AACA;AACA;AACA;;AAEA;;;;;;;AAOA,SAASkX,WAAT,CAAsB7I,SAAtB,EAAiChM,QAAjC,EAA2C;AACvC,QAAM2T,SAAS,EAAf;AACA,QAAM7T,aAAakM,UAAUlV,aAAV,EAAnB;AACA,QAAMge,aAAahV,WAAWO,YAAX,EAAnB;;AAEAvV,WAAOka,OAAP,CAAe8P,UAAf,EAA2B9pB,OAA3B,CAAmC,gBAAW;AAAA;AAAA,YAAT1C,GAAS;;AAC1C,YAAI0X,YAAYA,SAAS1U,MAAzB,EAAiC;AAC7B,gBAAI0U,SAASrL,OAAT,CAAiBrM,GAAjB,MAA0B,CAAC,CAA/B,EAAkC;AAC9BqrB,uBAAOzoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,SAJD,MAIO;AACHqrB,mBAAOzoB,IAAP,CAAY5C,GAAZ;AACH;AACJ,KARD;;AAUA,WAAOqrB,MAAP;AACH;;AAED;;;;;;;AAOA,SAASoB,aAAT,CAAwB/I,SAAxB,EAAkD;AAAA,QAAfpW,QAAe,uEAAJ,EAAI;;AAC9C,QAAM4c,SAAS,EAAf;AACA,QAAM1S,aAAakM,UAAUlV,aAAV,EAAnB;AACA,QAAMke,WAAWlV,WAAWK,UAAX,EAAjB;AACA,QAAM8U,a
AAa5e,4DAAYA,CAACD,cAAb,EAAnB;;AAEAtL,WAAOmI,IAAP,CAAY+hB,QAAZ,EAAsBhqB,OAAtB,CAA8B,UAACkqB,WAAD,EAAiB;AAC3C,YAAI,OAAOtf,SAASsf,WAAT,CAAP,KAAiC,QAArC,EAA+C;AAC3Ctf,qBAASsf,WAAT,IAAwBF,SAASE,WAAT,EAAsBxS,QAAtB,EAAxB;AACH;AACD,YAAMvG,YAAY9F,4DAAYA,CAAC8e,OAAb,CAAqBvf,SAASsf,WAAT,CAArB,CAAlB;AACA,YAAI/Y,SAAJ,EAAe;AACXqW,mBAAO0C,WAAP,IAAsB/Y,SAAtB;AACH,SAFD,MAEO;AACHqW,mBAAO0C,WAAP,IAAsBD,UAAtB;AACArf,qBAASsf,WAAT,IAAwBvS,qEAAxB;AACH;AACJ,KAXD;AAYA,WAAO6P,MAAP;AACH;;AAED;;;;;;;;;AASA,SAAStc,OAAT,CAAkB8V,SAAlB,EAA6BhM,QAA7B,EAAuCpK,QAAvC,EAAiDwf,iBAAjD,EAAoE;AAChE,QAAMC,YAAYR,YAAY7I,SAAZ,EAAuBhM,QAAvB,CAAlB;AACA,QAAMsV,aAAaP,cAAc/I,SAAd,EAAyBpW,QAAzB,CAAnB;AACA,QAAMkK,aAAakM,UAAUlV,aAAV,EAAnB;AACA,QAAMyQ,gBAAgBzH,WAAWnI,SAAX,EAAtB;AACA,QAAM4d,SAASzV,WAAW3V,IAA1B;AACA,QAAM+R,eAAe,EAArB;AACA,QAAMsZ,aAAa,EAAnB;AACA,QAAM7kB,SAAS,EAAf;AACA,QAAM8kB,UAAU,EAAhB;AACA,QAAM/kB,OAAO,EAAb;AACA,QAAIuF,qBAAJ;;AAEA;AACAnL,WAAOka,OAAP,CAAeuC,aAAf,EAA8Bvc,OAA9B,CAAsC,iBAAkB;AAAA;AAAA,YAAhB1C,GAAgB;AAAA,YAAXC,KAAW;;AACpD,YAAI8sB,UAAU1gB,OAAV,CAAkBrM,GAAlB,MAA2B,CAAC,CAA5B,IAAiCgtB,WAAWhtB,GAAX,CAArC,EAAsD;AAClDqI,mBAAOzF,IAAP,CAAY6f,sDAAOA,CAAC,EAAR,EAAYxiB,MAAMoI,MAAN,EAAZ,CAAZ;;AAEA,oBAAQpI,MAAMoI,MAAN,GAAeV,IAAvB;AACA,qBAAK0L,gDAASA,CAAC4B,OAAf;AACIiY,+BAAWtqB,IAAX,CAAgB5C,GAAhB;AACA;AACJ;AACA,qBAAKqT,gDAASA,CAACC,SAAf;AACIM,iCAAahR,IAAb,CAAkB5C,GAAlB;AANJ;AAQH;AACJ,KAbD;AAcA;AACA,QAAIotB,WAAW,CAAf;AACAnc,oFAAkBA,CAACyS,UAAUjY,WAA7B,EAA0C,UAAC3J,CAAD,EAAO;AAC7C,YAAIwW,OAAO,EAAX;AACA1E,qBAAalR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBkL,mBAAUA,IAAV,SAAkB2G,cAAc7R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAlB;AACH,SAFD;AAGA,YAAIqrB,QAAQ7U,IAAR,MAAkB3L,SAAtB,EAAiC;AAC7BwgB,oBAAQ7U,IAAR,IAAgB8U,QAAhB;AACAhlB,iBAAKxF,IAAL,CAAU,EAAV;AACAgR,yBAAalR,OAAb,CAAqB,UAAC0K,CAAD,EAAO;AACxBhF,qBAAKglB,QAAL,EAAehgB,CAAf,IAAoB6R,cAAc7R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAApB;AACH,aAFD;AAGAorB,uBAAWxqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAKglB,QAAL,EAAehgB,CAAf,IAAoB,CAAC6R,cAAc7R,CAAd,EAAi
BgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAAD,CAApB;AACH,aAFD;AAGAsrB,wBAAY,CAAZ;AACH,SAVD,MAUO;AACHF,uBAAWxqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBhF,qBAAK+kB,QAAQ7U,IAAR,CAAL,EAAoBlL,CAApB,EAAuBxK,IAAvB,CAA4Bqc,cAAc7R,CAAd,EAAiBgC,YAAjB,CAA8BhH,IAA9B,CAAmCtG,CAAnC,CAA5B;AACH,aAFD;AAGH;AACJ,KApBD;;AAsBA;AACA,QAAI+O,cAAc,EAAlB;AACA,QAAIC,gBAAgB,SAAhBA,aAAgB;AAAA,eAAM4S,UAAU3S,YAAV,EAAN;AAAA,KAApB;AACA3I,SAAK1F,OAAL,CAAa,UAACN,GAAD,EAAS;AAClB,YAAMqnB,QAAQrnB,GAAd;AACA8qB,mBAAWxqB,OAAX,CAAmB,UAAC0K,CAAD,EAAO;AACtBqc,kBAAMrc,CAAN,IAAW4f,WAAW5f,CAAX,EAAchL,IAAIgL,CAAJ,CAAd,EAAsB0D,aAAtB,EAAqCD,WAArC,CAAX;AACH,SAFD;AAGH,KALD;AAMA,QAAIic,iBAAJ,EAAuB;AACnBA,0BAAkBhd,qBAAlB;AACAnC,uBAAemf,iBAAf;AACH,KAHD,MAIK;AACDnf,uBAAe,IAAI/C,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,MAAMorB,MAAR,EAA5B,CAAf;AACH;AACD,WAAOtf,YAAP;AACH;;;;;;;;;;;;;;AC1ID;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;ACbA;AAAA;AAAA;;;;;;;AAOA,SAAS0f,SAAT,CAAoBxR,CAApB,EAAuBC,CAAvB,EAA0B;AACtB,QAAMwR,UAAQzR,CAAd;AACA,QAAM0R,UAAQzR,CAAd;AACA,QAAIwR,KAAKC,EAAT,EAAa;AACT,eAAO,CAAC,CAAR;AACH;AACD,QAAID,KAAKC,EAAT,EAAa;AACT,eAAO,CAAP;AACH;AACD,WAAO,CAAP;AACH;;AAED;;;;;;;;;;AAUA,SAASC,KAAT,CAAgBtkB,GAAhB,EAAqBukB,EAArB,EAAyBC,GAAzB,EAA8BC,EAA9B,EAAkCC,MAAlC,EAA0C;AACtC,QAAMC,UAAU3kB,GAAhB;AACA,QAAM4kB,SAAS,EAAf;AACA,SAAK,IAAIhsB,IAAI2rB,EAAb,EAAiB3rB,KAAK6rB,EAAtB,EAA0B7rB,KAAK,CAA/B,EAAkC;AAC9BgsB,eAAOhsB,CAAP,IAAY+rB,QAAQ/rB,CAAR,CAAZ;AACH;AACD,QAAI+Z,IAAI4R,EAAR;AACA,QAAI3R,IAAI4R,MAAM,CAAd;;AAEA,SAAK,IAAI5rB,KAAI2rB,EAAb,EAAiB3rB,MAAK6rB,EAAtB,EAA0B7rB,MAAK,CAA/B,EAAkC;AAC9B,YAAI+Z,IAAI6R,GAAR,EAAa;AACTG,oBAAQ/rB,EAAR,IAAagsB,OAAOhS,CAAP,CAAb;AACAA,iBAAK,CAAL;A
ACH,SAHD,MAGO,IAAIA,IAAI6R,EAAR,EAAY;AACfE,oBAAQ/rB,EAAR,IAAagsB,OAAOjS,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA,IAAI+R,OAAOE,OAAOjS,CAAP,CAAP,EAAkBiS,OAAOhS,CAAP,CAAlB,KAAgC,CAApC,EAAuC;AAC1C+R,oBAAQ/rB,EAAR,IAAagsB,OAAOjS,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH,SAHM,MAGA;AACHgS,oBAAQ/rB,EAAR,IAAagsB,OAAOhS,CAAP,CAAb;AACAA,iBAAK,CAAL;AACH;AACJ;AACJ;;AAED;;;;;;;;;;AAUA,SAAS1Q,IAAT,CAAelC,GAAf,EAAoBukB,EAApB,EAAwBE,EAAxB,EAA4BC,MAA5B,EAAoC;AAChC,QAAID,OAAOF,EAAX,EAAe;AAAE,eAAOvkB,GAAP;AAAa;;AAE9B,QAAMwkB,MAAMD,KAAKrR,KAAKyL,KAAL,CAAW,CAAC8F,KAAKF,EAAN,IAAY,CAAvB,CAAjB;AACAriB,SAAKlC,GAAL,EAAUukB,EAAV,EAAcC,GAAd,EAAmBE,MAAnB;AACAxiB,SAAKlC,GAAL,EAAUwkB,MAAM,CAAhB,EAAmBC,EAAnB,EAAuBC,MAAvB;AACAJ,UAAMtkB,GAAN,EAAWukB,EAAX,EAAeC,GAAf,EAAoBC,EAApB,EAAwBC,MAAxB;;AAEA,WAAO1kB,GAAP;AACH;;AAED;;;;;;;;;AASO,SAAS6kB,SAAT,CAAoB7kB,GAApB,EAA6C;AAAA,QAApB0kB,MAAoB,uEAAXP,SAAW;;AAChD,QAAInkB,IAAIlG,MAAJ,GAAa,CAAjB,EAAoB;AAChBoI,aAAKlC,GAAL,EAAU,CAAV,EAAaA,IAAIlG,MAAJ,GAAa,CAA1B,EAA6B4qB,MAA7B;AACH;AACD,WAAO1kB,GAAP;AACH,C;;;;;;;;;;;;AC1FD;AAAA;AAAA;AAAA;;AAEA;;;;;;;;;AASO,SAAS8kB,iBAAT,CAA4BtF,GAA5B,EAAiCC,GAAjC,EAAsC;AACzC,QAAMI,gBAAgBL,IAAIla,aAAJ,EAAtB;AACA,QAAMwa,gBAAgBL,IAAIna,aAAJ,EAAtB;AACA;AACA;AACA,QAAMyf,kBAAkB7E,0EAAeA,CAACL,aAAhB,EAA+BC,aAA/B,CAAxB;;AAEA,WAAO,UAACa,SAAD,EAAYC,SAAZ,EAA0B;AAC7B,YAAIoE,cAAc,IAAlB;AACAD,wBAAgBvrB,OAAhB,CAAwB,UAACqM,SAAD,EAAe;AACnC,gBAAI8a,UAAU9a,SAAV,EAAqBgR,aAArB,KACA+J,UAAU/a,SAAV,EAAqBgR,aADrB,IACsCmO,WAD1C,EACuD;AACnDA,8BAAc,IAAd;AACH,aAHD,MAGO;AACHA,8BAAc,KAAd;AACH;AACJ,SAPD;AAQA,eAAOA,WAAP;AACH,KAXD;AAYH,C;;;;;;;;;;;;AC9BD;AAAA;AAAA;AAAA;AAAA;AACA;;AAEO,SAAS3X,WAAT,CAAsB4X,UAAtB,EAAkCC,UAAlC,EAA8C;AACjD,WAAO/X,mEAAYA,CAAC8X,UAAb,EAAyBC,UAAzB,EAAqCJ,uFAAiBA,CAACG,UAAlB,EAA8BC,UAA9B,CAArC,EAAgF,IAAhF,CAAP;AACH,C;;;;;;;;;;;;ACLD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;;AAGO,SAAS5X,aAAT,CAAwB2X,UAAxB,EAAoCC,UAApC,EAAgDpJ,QAAhD,EAA0D;AAC7D,WAAO3O,mEAAYA,CAAC8X,UAAb,EAAyBC,UAAzB,EAAqCpJ,QAArC,EAA+C,KAA/C,EAAsDze,gDAAKA,CAACE,SAA5D,CAAP;AACH;;
AAEM,SAASgQ,cAAT,CAAyB0X,UAAzB,EAAqCC,UAArC,EAAiDpJ,QAAjD,EAA2D;AAC9D,WAAO3O,mEAAYA,CAAC+X,UAAb,EAAyBD,UAAzB,EAAqCnJ,QAArC,EAA+C,KAA/C,EAAsDze,gDAAKA,CAACG,UAA5D,CAAP;AACH;;AAEM,SAASgQ,aAAT,CAAwByX,UAAxB,EAAoCC,UAApC,EAAgDpJ,QAAhD,EAA0D;AAC7D,WAAOrO,oDAAKA,CAACH,cAAc2X,UAAd,EAA0BC,UAA1B,EAAsCpJ,QAAtC,CAAN,EAAuDvO,eAAe0X,UAAf,EAA2BC,UAA3B,EAAuCpJ,QAAvC,CAAvD,CAAP;AACH,C;;;;;;;;;;;;ACfD;AAAA;AAAA;AAAA;;;;;;;AAOO,IAAM5O,oBAAoB,SAApBA,iBAAoB;AAAA,oCAAIvL,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAMud,GAAGhS,iBAAH,WAAwBvL,IAAxB,CAAN;AAAA,GAAb;AAAA,CAA1B;;AAEP;;;;;;;AAOO,IAAMO,OAAO,SAAPA,IAAO;AAAA,qCAAIP,IAAJ;AAAIA,QAAJ;AAAA;;AAAA,SAAa;AAAA,WAAMud,GAAGhd,IAAH,WAAWP,IAAX,CAAN;AAAA,GAAb;AAAA,CAAb,C;;;;;;;;;;;;AChBP;AAAA;AAAA;;;;;;;AAOO,SAASoG,kBAAT,CAA6BrE,UAA7B,EAAyC8F,QAAzC,EAAmD;AACtD,QAAI9F,WAAW5J,MAAX,GAAoB,CAAxB,EAA2B;AACvB,YAAMqrB,aAAazhB,WAAWG,KAAX,CAAiB,GAAjB,CAAnB;AACAshB,mBAAW3rB,OAAX,CAAmB,UAAC4rB,OAAD,EAAa;AAC5B,gBAAMC,aAAaD,QAAQvhB,KAAR,CAAc,GAAd,CAAnB;AACA,gBAAME,QAAQ,CAAEshB,WAAW,CAAX,CAAhB;AACA,gBAAMrhB,MAAM,EAAEqhB,WAAW,CAAX,KAAiBA,WAAW,CAAX,CAAnB,CAAZ;AACA,gBAAIrhB,OAAOD,KAAX,EAAkB;AACd,qBAAK,IAAInL,IAAImL,KAAb,EAAoBnL,KAAKoL,GAAzB,EAA8BpL,KAAK,CAAnC,EAAsC;AAClC4Q,6BAAS5Q,CAAT;AACH;AACJ;AACJ,SATD;AAUH;AACJ,C;;;;;;;;;;;;;;;;;;;;;;ACrBD;AACA;AACA;AACA;;AAEA;;;;;;;;AAQA,SAAS0sB,SAAT,CAAoBC,QAApB,EAA8BC,QAA9B,EAAwC;AACpC,QAAIC,gBAAJ;;AAEA,YAAQF,QAAR;AACA,aAAK5Y,qDAAcA,CAACC,UAApB;AACA,aAAKtC,uDAAgBA,CAACwB,QAAtB;AACI,gBAAI0Z,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAAC9S,CAAD,EAAIC,CAAJ;AAAA,2BAAUD,IAAIC,CAAd;AAAA,iBAAV;AACH,aAFD,MAEO;AACH6S,0BAAU,iBAAC9S,CAAD,EAAIC,CAAJ;AAAA,2BAAUA,IAAID,CAAd;AAAA,iBAAV;AACH;AACD;AACJ;AACI,gBAAI6S,aAAa,KAAjB,EAAwB;AACpBC,0BAAU,iBAAC9S,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AACH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAR,GAAY,CAAC,CAApB;AACH,iBAPD;AAQH,aATD,MASO;AACH6S,0BAAU,iBAAC9S,CAAD,EAAIC,CAAJ,EAAU;AAChBD,6BAAOA,CAAP;AACAC,6BAAOA,CAAP;AACA,wBAAID,MAAMC,CAAV,EAAa;AACT,+BAAO,CAAP;AA
CH;AACD,2BAAOD,IAAIC,CAAJ,GAAQ,CAAC,CAAT,GAAa,CAApB;AACH,iBAPD;AAQH;AA5BL;;AA+BA,WAAO6S,OAAP;AACH;;AAED;;;;;;;AAOA,SAASC,mBAAT,CAA8BC,QAA9B,EAAwCC,YAAxC,EAAsD;AAClD,QAAMC,YAAYrU,OAAOoU,YAAP,EAAqBE,WAArB,OAAuC,MAAvC,GAAgD,MAAhD,GAAyD,KAA3E;AACA,WAAOR,UAAUK,SAASlnB,IAAnB,EAAyBonB,SAAzB,CAAP;AACH;;AAED;;;;;;;AAOA,SAASE,SAAT,CAAoB7mB,IAApB,EAA0BoH,UAA1B,EAAsC;AAClC,QAAM2d,UAAU,IAAIjmB,GAAJ,EAAhB;AACA,QAAMgoB,cAAc,EAApB;;AAEA9mB,SAAK1F,OAAL,CAAa,UAAC+J,KAAD,EAAW;AACpB,YAAM0iB,WAAW1iB,MAAM+C,UAAN,CAAjB;AACA,YAAI2d,QAAQrlB,GAAR,CAAYqnB,QAAZ,CAAJ,EAA2B;AACvBD,wBAAY/B,QAAQplB,GAAR,CAAYonB,QAAZ,CAAZ,EAAmC,CAAnC,EAAsCvsB,IAAtC,CAA2C6J,KAA3C;AACH,SAFD,MAEO;AACHyiB,wBAAYtsB,IAAZ,CAAiB,CAACusB,QAAD,EAAW,CAAC1iB,KAAD,CAAX,CAAjB;AACA0gB,oBAAQ1lB,GAAR,CAAY0nB,QAAZ,EAAsBD,YAAYlsB,MAAZ,GAAqB,CAA3C;AACH;AACJ,KARD;;AAUA,WAAOksB,WAAP;AACH;;AAED;;;;;;;;;AASA,SAASE,kBAAT,CAA6BC,YAA7B,EAA2CC,YAA3C,EAAyDC,kBAAzD,EAA6E;AACzE,QAAMC,MAAM;AACRC,eAAOJ,aAAa,CAAb;AADC,KAAZ;;AAIAC,iBAAazlB,MAAb,CAAoB,UAACC,GAAD,EAAMqC,IAAN,EAAYC,GAAZ,EAAoB;AACpCtC,YAAIqC,IAAJ,IAAYkjB,aAAa,CAAb,EAAgBztB,GAAhB,CAAoB;AAAA,mBAAS6K,MAAM8iB,mBAAmBnjB,GAAnB,EAAwBqE,KAA9B,CAAT;AAAA,SAApB,CAAZ;AACA,eAAO3G,GAAP;AACH,KAHD,EAGG0lB,GAHH;;AAKA,WAAOA,GAAP;AACH;;AAED;;;;;;;AAOA,SAASE,iBAAT,CAA4BtnB,IAA5B,EAAkCC,MAAlC,EAA0C4F,cAA1C,EAA0D;AACtD,QAAIc,kBAAJ;AACA,QAAI4gB,iBAAJ;AACA,QAAId,iBAAJ;AACA,QAAI/sB,IAAImM,eAAejL,MAAf,GAAwB,CAAhC;;AAEA,WAAOlB,KAAK,CAAZ,EAAeA,GAAf,EAAoB;AAChBiN,oBAAYd,eAAenM,CAAf,EAAkB,CAAlB,CAAZ;AACA6tB,mBAAW1hB,eAAenM,CAAf,EAAkB,CAAlB,CAAX;AACA+sB,mBAAWtL,6DAAaA,CAAClb,MAAd,EAAsB0G,SAAtB,CAAX;;AAEA,YAAI,CAAC8f,QAAL,EAAe;AACX;AACA;AACH;;AAED,YAAIe,yDAAUA,CAACD,QAAX,CAAJ,EAA0B;AACtB;AACA5B,yEAASA,CAAC3lB,IAAV,EAAgB,UAACyT,CAAD,EAAIC,CAAJ;AAAA,uBAAU6T,SAAS9T,EAAEgT,SAASpe,KAAX,CAAT,EAA4BqL,EAAE+S,SAASpe,KAAX,CAA5B,CAAV;AAAA,aAAhB;AACH,SAHD,MAGO,IAAItH,sDAAOA,CAACwmB,QAAR,CAAJ,EAAuB;AAAA;AAC1B,oBAAMT,cAAcD,UAAU7mB,IAAV,EAAgBymB,SAASpe,KAAzB,CAApB;AACA,oBAAMof,YAAYF,SAASA,SAAS3sB,MAAT,GAAkB,CAA3B,CAAlB;AACA,oBAAMssB
,eAAeK,SAAS1qB,KAAT,CAAe,CAAf,EAAkB0qB,SAAS3sB,MAAT,GAAkB,CAApC,CAArB;AACA,oBAAMusB,qBAAqBD,aAAa1tB,GAAb,CAAiB;AAAA,2BAAK2hB,6DAAaA,CAAClb,MAAd,EAAsBlG,CAAtB,CAAL;AAAA,iBAAjB,CAA3B;;AAEA+sB,4BAAYxsB,OAAZ,CAAoB,UAAC2sB,YAAD,EAAkB;AAClCA,iCAAazsB,IAAb,CAAkBwsB,mBAAmBC,YAAnB,EAAiCC,YAAjC,EAA+CC,kBAA/C,CAAlB;AACH,iBAFD;;AAIAxB,6EAASA,CAACmB,WAAV,EAAuB,UAACrT,CAAD,EAAIC,CAAJ,EAAU;AAC7B,wBAAMgU,IAAIjU,EAAE,CAAF,CAAV;AACA,wBAAMnX,IAAIoX,EAAE,CAAF,CAAV;AACA,2BAAO+T,UAAUC,CAAV,EAAaprB,CAAb,CAAP;AACH,iBAJD;;AAMA;AACA0D,qBAAKpF,MAAL,GAAc,CAAd;AACAksB,4BAAYxsB,OAAZ,CAAoB,UAAC+J,KAAD,EAAW;AAC3BrE,yBAAKxF,IAAL,gCAAa6J,MAAM,CAAN,CAAb;AACH,iBAFD;AAlB0B;AAqB7B,SArBM,MAqBA;AAAA;AACH,oBAAMmhB,SAASgB,oBAAoBC,QAApB,EAA8Bc,QAA9B,CAAf;AACA;AACA5B,6EAASA,CAAC3lB,IAAV,EAAgB,UAACyT,CAAD,EAAIC,CAAJ;AAAA,2BAAU8R,OAAO/R,EAAEgT,SAASpe,KAAX,CAAP,EAA0BqL,EAAE+S,SAASpe,KAAX,CAA1B,CAAV;AAAA,iBAAhB;AAHG;AAIN;AACJ;AACJ;;AAED;;;;;;;;;AASA,IAAMsf,sBAAsB,SAAtBA,mBAAsB,CAACC,UAAD,EAAa5nB,IAAb,EAAmBC,MAAnB,EAA2B4F,cAA3B,EAA8C;AACtE,QAAI+hB,WAAWhtB,MAAX,KAAsB,CAA1B,EAA6B;AAAE,eAAOoF,IAAP;AAAc;;AAE7C,QAAM6nB,YAAYD,WAAW,CAAX,CAAlB;AACA,QAAMpuB,MAAM,IAAIsF,GAAJ,EAAZ;;AAEAkB,SAAKyB,MAAL,CAAY,UAACC,GAAD,EAAMomB,OAAN,EAAkB;AAC1B,YAAMC,OAAOD,QAAQD,UAAUxf,KAAlB,CAAb;AACA,YAAI3G,IAAIhC,GAAJ,CAAQqoB,IAAR,CAAJ,EAAmB;AACfrmB,gBAAI/B,GAAJ,CAAQooB,IAAR,EAAcvtB,IAAd,CAAmBstB,OAAnB;AACH,SAFD,MAEO;AACHpmB,gBAAIrC,GAAJ,CAAQ0oB,IAAR,EAAc,CAACD,OAAD,CAAd;AACH;AACD,eAAOpmB,GAAP;AACH,KARD,EAQGlI,GARH;;AANsE;AAAA;AAAA;;AAAA;AAgBtE,6BAAuBA,GAAvB,8HAA4B;AAAA;;AAAA;;AAAA,gBAAlB5B,GAAkB;AAAA,gBAAbwa,GAAa;;AACxB,gBAAM4V,OAAOL,oBAAoBC,WAAW/qB,KAAX,CAAiB,CAAjB,CAApB,EAAyCuV,GAAzC,EAA8CnS,MAA9C,EAAsD4F,cAAtD,CAAb;AACArM,gBAAI6F,GAAJ,CAAQzH,GAAR,EAAaowB,IAAb;AACA,gBAAIntB,MAAMkG,OAAN,CAAcinB,IAAd,CAAJ,EAAyB;AACrBV,kCAAkBU,IAAlB,EAAwB/nB,MAAxB,EAAgC4F,cAAhC;AACH;AACJ;AAtBqE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;AAwBtE,WAAOrM,GAAP;AACH,CAzBD;;AA2BA;;;;;;;;;AASA,SAASyuB,cAAT,CAAyBjoB,IAAzB,EAA+BC,MAA/B,EAAuC4F,cAA
vC,EAAuD+hB,UAAvD,EAAmE;AAC/D/hB,qBAAiBA,eAAe2N,MAAf,CAAsB,UAAC0U,MAAD,EAAY;AAC/C,YAAIA,OAAO,CAAP,MAAc,IAAlB,EAAwB;AACpBN,uBAAWptB,IAAX,CAAgB0tB,OAAO,CAAP,CAAhB;AACA,mBAAO,KAAP;AACH;AACD,eAAO,IAAP;AACH,KANgB,CAAjB;AAOA,QAAIriB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE,eAAOoF,IAAP;AAAc;;AAEjD4nB,iBAAaA,WAAWpuB,GAAX,CAAe;AAAA,eAAK2hB,6DAAaA,CAAClb,MAAd,EAAsBrD,CAAtB,CAAL;AAAA,KAAf,CAAb;;AAEA,QAAMurB,iBAAiBR,oBAAoBC,UAApB,EAAgC5nB,IAAhC,EAAsCC,MAAtC,EAA8C4F,cAA9C,CAAvB;AACA,WAAO7F,KAAKxG,GAAL,CAAS,UAACQ,GAAD,EAAS;AACrB,YAAIN,IAAI,CAAR;AACA,YAAI0uB,UAAUD,cAAd;;AAEA,eAAO,CAACttB,MAAMkG,OAAN,CAAcqnB,OAAd,CAAR,EAAgC;AAC5BA,sBAAUA,QAAQzoB,GAAR,CAAY3F,IAAI4tB,WAAWluB,GAAX,EAAgB2O,KAApB,CAAZ,CAAV;AACH;;AAED,eAAO+f,QAAQC,KAAR,EAAP;AACH,KATM,CAAP;AAUH;;AAED;;;;;;AAMO,SAASjG,QAAT,CAAmBkG,OAAnB,EAA4BziB,cAA5B,EAA4C;AAAA,QACzC5F,MADyC,GACxBqoB,OADwB,CACzCroB,MADyC;AAAA,QACjCD,IADiC,GACxBsoB,OADwB,CACjCtoB,IADiC;;;AAG/C6F,qBAAiBA,eAAe2N,MAAf,CAAsB;AAAA,eAAW,CAAC,CAAC2H,6DAAaA,CAAClb,MAAd,EAAsBsoB,QAAQ,CAAR,CAAtB,CAAb;AAAA,KAAtB,CAAjB;AACA,QAAI1iB,eAAejL,MAAf,KAA0B,CAA9B,EAAiC;AAAE;AAAS;;AAE5C,QAAI4tB,kBAAkB3iB,eAAewB,SAAf,CAAyB;AAAA,eAAWkhB,QAAQ,CAAR,MAAe,IAA1B;AAAA,KAAzB,CAAtB;AACAC,sBAAkBA,oBAAoB,CAAC,CAArB,GAAyBA,eAAzB,GAA2C3iB,eAAejL,MAA5E;;AAEA,QAAM6tB,yBAAyB5iB,eAAehJ,KAAf,CAAqB,CAArB,EAAwB2rB,eAAxB,CAA/B;AACA,QAAME,sBAAsB7iB,eAAehJ,KAAf,CAAqB2rB,eAArB,CAA5B;;AAEAlB,sBAAkBtnB,IAAlB,EAAwBC,MAAxB,EAAgCwoB,sBAAhC;AACAzoB,WAAOioB,eAAejoB,IAAf,EAAqBC,MAArB,EAA6ByoB,mBAA7B,EAAkDD,uBAAuBjvB,GAAvB,CAA2B;AAAA,eAAU0uB,OAAO,CAAP,CAAV;AAAA,KAA3B,CAAlD,CAAP;;AAEAI,YAAQ5kB,IAAR,GAAe1D,KAAKxG,GAAL,CAAS;AAAA,eAAOQ,IAAI2uB,GAAJ,EAAP;AAAA,KAAT,CAAf;AACAL,YAAQtoB,IAAR,GAAeA,IAAf;AACH,C;;;;;;;;;;;;AChQD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;AACA;;;;;;;AAOO,SAASuO,KAAT,CAAgB+R,GAAhB,EAAqBC,GAArB,EAA0B;AAC7B,QAAM+B,YAAY,EAAlB;AACA,QAAMriB,SAAS,EAAf;AACA,QAAMsiB,gBAAgB,EAAtB;AACA,QAAMviB,OAAO,EAAb;AACA,QAAM2gB,gBAAgBL,IAAIla,aAAJ,EAAtB;AACA,QAAMwa,gBAAgBL,IAAIna,aAAJ,EAAtB;AACA,QAAMoc,wBAAwB7B,cAAc
1Z,SAAd,EAA9B;AACA,QAAMwb,wBAAwB7B,cAAc3Z,SAAd,EAA9B;AACA,QAAMxN,OAAUknB,cAAclnB,IAAxB,eAAsCmnB,cAAcnnB,IAA1D;;AAEA;AACA,QAAI,CAACipB,gEAAUA,CAACpC,IAAI/c,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAX,EAAiDud,IAAIhd,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,EAA8B3B,IAA9B,EAAjD,CAAL,EAA6F;AACzF,eAAO,IAAP;AACH;;AAED;AACCsd,QAAI/c,cAAJ,CAAmBoB,KAAnB,CAAyB,GAAzB,CAAD,CAAgCrK,OAAhC,CAAwC,UAACqM,SAAD,EAAe;AACnD,YAAM9E,QAAQ2gB,sBAAsB7b,SAAtB,CAAd;AACA1G,eAAOzF,IAAP,CAAY6f,sDAAOA,CAAC,EAAR,EAAYxY,MAAM5B,MAAN,EAAZ,CAAZ;AACAsiB,sBAAc/nB,IAAd,CAAmBqH,MAAM5B,MAAN,GAAexG,IAAlC;AACH,KAJD;;AAMA;;;;;;AAMA,aAASkpB,iBAAT,CAA4B3C,EAA5B,EAAgC/Y,SAAhC,EAA2C;AACvC4B,wFAAkBA,CAACmX,GAAG3c,WAAtB,EAAmC,UAAC3J,CAAD,EAAO;AACtC,gBAAM2nB,QAAQ,EAAd;AACA,gBAAIwB,WAAW,EAAf;AACAN,0BAAcjoB,OAAd,CAAsB,UAACwoB,UAAD,EAAgB;AAClC,oBAAMjrB,QAAQoP,UAAU6b,UAAV,EAAsB9b,YAAtB,CAAmChH,IAAnC,CAAwCtG,CAAxC,CAAd;AACAmpB,kCAAgBhrB,KAAhB;AACAwpB,sBAAMyB,UAAN,IAAoBjrB,KAApB;AACH,aAJD;AAKA,gBAAI,CAACyqB,UAAUO,QAAV,CAAL,EAA0B;AACtB7iB,qBAAKxF,IAAL,CAAU6mB,KAAV;AACAiB,0BAAUO,QAAV,IAAsB,IAAtB;AACH;AACJ,SAZD;AAaH;;AAED;AACAF,sBAAkBrC,GAAlB,EAAuBkC,qBAAvB;AACAG,sBAAkBpC,GAAlB,EAAuBkC,qBAAvB;;AAEA,WAAO,IAAIjgB,+CAAJ,CAAcxC,IAAd,EAAoBC,MAApB,EAA4B,EAAExG,UAAF,EAA5B,CAAP;AACH,C;;;;;;;;;;;;;;;;;;;;;AC7DD;AACA;AACA;AAOA;;AAEA;;;;;;;;;;;;IAWMiT,Q;;AAEF;;;;;;;;;;AAUA,wBAAwB;AAAA;;AACpB,YAAIkc,eAAJ;;AAEA,aAAKrM,OAAL,GAAe,IAAf;AACA,aAAKnH,WAAL,GAAmB,EAAnB;AACA,aAAKO,mBAAL,GAA2B,EAA3B;AACA,aAAKwG,SAAL,GAAiB,EAAjB;;AANoB,0CAAR7W,MAAQ;AAARA,kBAAQ;AAAA;;AAQpB,YAAIA,OAAO1K,MAAP,KAAkB,CAAlB,IAAwB,CAACguB,SAAStjB,OAAO,CAAP,CAAV,aAAgCoH,QAA5D,EAAuE;AACnE;AACA,iBAAKnJ,cAAL,GAAsBqlB,OAAOrlB,cAA7B;AACA,iBAAKF,WAAL,GAAmBulB,OAAOvlB,WAA1B;AACA,iBAAK8C,WAAL,GAAmByiB,OAAOziB,WAA1B;AACA,iBAAKoW,OAAL,GAAeqM,MAAf;AACA,iBAAK/hB,kBAAL,GAA0B,KAAK0V,OAAL,CAAa1V,kBAAvC;AACA,iBAAKgiB,eAAL,GAAuBrZ,0DAAWA,EAAlC;AACA,iBAAK9H,qBAAL,GAA6BC,qBAA7B;AACH,SATD,MASO;AACHiT,8DAAUA,mBAAC,IAAX,SAAoBtV,MAApB;AACA,iBAAKujB,eAAL,GAAuB,KAAKhiB,kBAAL,CAAwBpN,IAA/C;AACA,iBAAKiO,q
BAAL,GAA6BC,qBAA7B;AACA,iBAAKgC,qBAAL,GAA6B;AACzBuT,gCAAgB,EADS;AAEzBY,kCAAkB;AAFO,aAA7B;AAIH;AACJ;;AAED;;;;;;;;;;;;;;;;;;;;;;;;oCAoBa;AACT,mBAAO,KAAK1X,aAAL,GAAqBxE,MAArB,CAA4BpI,GAA5B,CAAgC;AAAA,uBAAK8J,EAAErD,MAAF,EAAL;AAAA,aAAhC,CAAP;AACH;;AAED;;;;;;;;;;;kCAQU;AACN,mBAAO,KAAK4oB,eAAZ;AACH;;;wCAEgB;AACb,mBAAO,KAAKC,WAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,WAAL,GAAmBpU,4DAAYA,CAAC,CAAC,KAAKrR,WAAN,EAAmB,KAAKE,cAAxB,CAAb,EACd,KAAKN,oBAAL,EADc,EACe,KAAK4lB,eADpB,CAAnB;AAEA,mBAAO,IAAP;AACH;;;+CAEuB;AACpB,mBAAO,KAAKhiB,kBAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;6BA8BMkiB,Q,EAAUnM,Q,EAAU;AACtB,mBAAO3O,8DAAYA,CAAC,IAAb,EAAmB8a,QAAnB,EAA6BnM,QAA7B,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;oCAoBamM,Q,EAAU;AACnB,mBAAO9a,8DAAYA,CAAC,IAAb,EAAmB8a,QAAnB,EAA6BnD,mEAAiBA,CAAC,IAAlB,EAAwBmD,QAAxB,CAA7B,EAAgE,IAAhE,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;8BAkBOC,S,EAAW;AACd,mBAAOza,wDAAM,IAAN,EAAYya,SAAZ,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;mCAiBYC,c,EAAgB;AACxB,mBAAO/a,6DAAW,IAAX,EAAiB+a,cAAjB,CAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;+BA+CQ9R,Q,EAAUhS,M,EAAQ;AACtB,gBAAMuG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdzG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBsK,SAAlB,EAA6BvG,MAA7B,CAAT;AACAA,mBAAOwG,IAAP,GAAcxG,OAAOwG,IAAP,IAAeD,UAAUC,IAAvC;;AAEA,gBAAMiO,cAAc,EAAExU,WAAWD,OAAOC,SAApB,EAApB;AACA,mBAAOuU,+DAAeA,CAClB,IADG,EAEHxC,QAFG,EAGHhS,MAHG,EAIHyU,WAJG,CAAP;AAMH;;AAED;;;;;;;;;;;;;;;;;;;;;kCAkBW;AACP,mBAAO,CAAC,KAAKvW,WAAL,CAAiBzI,MAAlB,IAA4B,CAAC,KAAK2I,cAAL,CAAoB3I,MAAxD;AACH;;AAED;;;;;;;;;;gCAOyB;AAAA,gBAAlBwK,SAAkB,uEAAN,IAAM;;AACrB,gBAAM8R,WAAW,IAAI,KAAKhR,WAAT,CAAqB,IAArB,CAAjB;AACA,gBAAId,SAAJ,EAAe;AACX8R,yBAAStR,SAAT,CAAmB,IAAnB;AACH,aAFD,MAEO;AACHsR,yBAAStR,SAAT,CAAmB,IAAnB;AACH;AACD,mBAAOsR,QAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;gCA2CS8C,S,EAAW7U,M,EAAQ;AACxB,gBAAMuG,YAAY;AACdC,sBAAMC,oDAAaA,CAACC,MADN;AAEdzG,2BAAW;AAFG,aAAlB;AAIAD,qBAAS/K,OAAOgH,MAAP,CAAc,EAAd,EAAkBsK,SAAlB,EAA6BvG,MAA7B,CAAT;AACA,gBAAM8G,cAAc,KAAKjE,eAA
L,EAApB;AACA,gBAAMkE,YAAY9R,OAAOmI,IAAP,CAAY0J,WAAZ,CAAlB;AAPwB,0BAQP9G,MARO;AAAA,gBAQhBwG,IARgB,WAQhBA,IARgB;;AASxB,gBAAMuS,sBAAsB7R,sEAAsBA,CAAC2N,SAAvB,EAAkC9N,SAAlC,EAA6CD,WAA7C,CAA5B;;AAEA,gBAAIqP,kBAAJ;;AAEA,gBAAI3P,SAASC,oDAAaA,CAACmB,GAA3B,EAAgC;AAC5B,oBAAImc,kBAAkBnP,gEAAgBA,CAAC,IAAjB,EAAuBmE,mBAAvB,EAA4C;AAC9DvS,0BAAMC,oDAAaA,CAACC,MAD0C;AAE9DzG,+BAAWD,OAAOC;AAF4C,iBAA5C,EAGnB8G,SAHmB,CAAtB;AAIA,oBAAIid,iBAAiBpP,gEAAgBA,CAAC,IAAjB,EAAuBmE,mBAAvB,EAA4C;AAC7DvS,0BAAMC,oDAAaA,CAACkB,OADyC;AAE7D1H,+BAAWD,OAAOC;AAF2C,iBAA5C,EAGlB8G,SAHkB,CAArB;AAIAoP,4BAAY,CAAC4N,eAAD,EAAkBC,cAAlB,CAAZ;AACH,aAVD,MAUO;AACH,oBAAID,mBAAkBnP,gEAAgBA,CAAC,IAAjB,EAAuBmE,mBAAvB,EAA4C/Y,MAA5C,EAAoD+G,SAApD,CAAtB;AACAoP,4BAAY4N,gBAAZ;AACH;;AAED,mBAAO5N,SAAP;AACH;;;0CAEkB;AACf,mBAAO,KAAK8N,YAAZ;AACH;;;gDAEwB;AACrB,iBAAKA,YAAL,GAAoB,KAAKN,WAAL,CAAiBlnB,MAAjB,CAAwBH,MAAxB,CAA+B,UAACC,GAAD,EAAM2nB,QAAN,EAAgB3vB,CAAhB,EAAsB;AACrEgI,oBAAI2nB,SAAS5vB,IAAT,EAAJ,IAAuB;AACnB4O,2BAAO3O,CADY;AAEnBmf,yBAAKwQ,SAASppB,MAAT;AAFc,iBAAvB;AAIA,uBAAOyB,GAAP;AACH,aANmB,EAMjB,EANiB,CAApB;AAOA,mBAAO,IAAP;AACH;;AAGD;;;;;;;;;kCAMW;AACP,iBAAK6a,OAAL,IAAgB,KAAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAe,IAAf;AACA,iBAAKJ,SAAL,CAAe7hB,OAAf,CAAuB,UAAC8hB,KAAD,EAAW;AAC9BA,sBAAMG,OAAN,GAAgB,IAAhB;AACH,aAFD;AAGA,iBAAKJ,SAAL,GAAiB,EAAjB;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;oCA0BaC,K,EAAO;AAChB,gBAAIpY,MAAM,KAAKmY,SAAL,CAAe9U,SAAf,CAAyB;AAAA,uBAAWkiB,YAAYnN,KAAvB;AAAA,aAAzB,CAAV;AACApY,oBAAQ,CAAC,CAAT,GAAa,KAAKmY,SAAL,CAAe5a,MAAf,CAAsByC,GAAtB,EAA2B,CAA3B,CAAb,GAA6C,IAA7C;AACH;;AAED;;;;;;;;kCAKWwlB,M,EAAQ;AACf,iBAAKjN,OAAL,IAAgB,KAAKA,OAAL,CAAa+M,WAAb,CAAyB,IAAzB,CAAhB;AACA,iBAAK/M,OAAL,GAAeiN,MAAf;AACAA,sBAAUA,OAAOrN,SAAP,CAAiB3hB,IAAjB,CAAsB,IAAtB,CAAV;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;oCAwBa;AACT,mBAAO,KAAK+hB,OAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;sCAyBe;AACX,mBAAO,KAAKJ,SAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;yCAwBkB;AACd,mBAAO,KAAK/G,WAAZ;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;iDAwB0B;AACt
B,mBAAO,KAAKO,mBAAZ;AACH;;;;;;AAGUjJ,uEAAf,E;;;;;;;;;;;;ACjkBA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;IAEe0W,G,GAAoDc,kE,CAApDd,G;IAAKI,G,GAA+CU,kE,CAA/CV,G;IAAK7R,G,GAA0CuS,kE,CAA1CvS,G;IAAKE,G,GAAqCqS,kE,CAArCrS,G;IAAK6R,K,GAAgCQ,kE,CAAhCR,K;IAAOC,I,GAAyBO,kE,CAAzBP,I;IAAMC,K,GAAmBM,kE,CAAnBN,K;IAAY6F,E,GAAOvF,kE,CAAZF,G;;;;;;;;;;;;;ACFvD;AAAA;;;;;;AAMe,yEAACnlB,KAAD,EAAW;AACtB,QAAInF,IAAI,CAAR;AACA,WAAO,YAAe;AAAA,0CAAXkI,MAAW;AAAXA,kBAAW;AAAA;;AAClBA,eAAOtH,OAAP,CAAe,UAAC8X,GAAD,EAAMhL,UAAN,EAAqB;AAChC,gBAAI,EAAEvI,MAAMuI,UAAN,aAA6BvM,KAA/B,CAAJ,EAA2C;AACvCgE,sBAAMuI,UAAN,IAAoBvM,MAAMujB,IAAN,CAAW,EAAExjB,QAAQlB,CAAV,EAAX,CAApB;AACH;AACDmF,kBAAMuI,UAAN,EAAkB5M,IAAlB,CAAuB4X,GAAvB;AACH,SALD;AAMA1Y;AACH,KARD;AASH,CAXD,E;;;;;;;;;;;;;;;;ACNA;;;;;;AAMA,SAASgwB,mBAAT,CAA8BzuB,IAA9B,EAAoC;AAChC,QAAIA,gBAAgB9C,IAApB,EAA0B;AACtB,eAAO8C,IAAP;AACH;;AAED,WAAO,IAAI9C,IAAJ,CAAS8C,IAAT,CAAP;AACH;AACD;;;;;;;AAOA,SAASR,GAAT,CAAc6B,CAAd,EAAiB;AACb,WAAQA,IAAI,EAAL,SAAgBA,CAAhB,GAAuBA,CAA9B;AACH;AACD;;;;;;;;;AASA;;;;;;;AAOAP,OAAO4tB,MAAP,GAAgB,UAAUztB,IAAV,EAAgB;AAC5B,WAAOA,KAAKY,OAAL,CAAa,0BAAb,EAAyC,MAAzC,CAAP;AACH,CAFD;;AAIA;;;;;;;;AAQA,0BAA2B,SAAS6R,iBAAT,CAA4BhW,MAA5B,EAAoC;AAC3D,SAAKA,MAAL,GAAcA,MAAd;AACA,SAAKixB,QAAL,GAAgBrlB,SAAhB;AACA,SAAK2O,UAAL,GAAkB3O,SAAlB;AACH;;AAED;AACAoK,kBAAkBkb,YAAlB,GAAiC,GAAjC;;AAEA;AACA;AACAlb,kBAAkBmb,uBAAlB,GAA4C;AACxCC,UAAM,CADkC;AAExCC,WAAO,CAFiC;AAGxCC,SAAK,CAHmC;AAIxCC,UAAM,CAJkC;AAKxCC,YAAQ,CALgC;AAMxCC,YAAQ,CANgC;AAOxCC,iBAAa;AAP2B,CAA5C;;AAUA;;;;;;;AAOA1b,kBAAkB2b,mBAAlB,GAAwC,UAAUC,MAAV,EAAkB;AACtD,WAAO,UAAUnY,GAAV,EAAe;AAClB,YAAIW,kBAAJ;AACA,YAAIkB,SAASlB,YAAYyX,SAASpY,GAAT,EAAc,EAAd,CAArB,CAAJ,EAA6C;AACzC,mBAAOW,SAAP;AACH;;AAED,eAAOwX,MAAP;AACH,KAPD;AAQH,CATD;;AAWA;;;;;;;;AAQA5b,kBAAkB8b,kBAAlB,GAAuC,UAAU/R,KAAV,EAAiB6R,MAAjB,EAAyB;AAC5D,WAAO,UAACnY,GAAD,EAAS;AACZ,YAAI1Y,UAAJ;AACA,YAAIgxB,UAAJ;;AAEA,YAAI,CAACtY,GAAL,EAAU;AAAE,mBAAOmY,MAAP;AAAgB;;AAE5B,YAAMI,OAAOvY,IAAIwU,WAAJ,EAAb;;AAEA,aAAKltB,IAAI,CAAJ,EAAOgxB,IAAIhS,MA
AM9d,MAAtB,EAA8BlB,IAAIgxB,CAAlC,EAAqChxB,GAArC,EAA0C;AACtC,gBAAIgf,MAAMhf,CAAN,EAASktB,WAAT,OAA2B+D,IAA/B,EAAqC;AACjC,uBAAOjxB,CAAP;AACH;AACJ;;AAED,YAAIA,MAAM6K,SAAV,EAAqB;AACjB,mBAAOgmB,MAAP;AACH;AACD,eAAO,IAAP;AACH,KAlBD;AAmBH,CApBD;;AAsBA;;;;;;;;;;;;;;;;;AAiBA5b,kBAAkBic,mBAAlB,GAAwC,YAAY;AAChD,QAAMC,UAAU;AACZC,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,CADK;AAUZC,cAAM,CACF,QADE,EAEF,QAFE,EAGF,SAHE,EAIF,WAJE,EAKF,UALE,EAMF,QANE,EAOF,UAPE;AAVM,KAAhB;AAoBA,QAAMC,YAAY;AACdF,eAAO,CACH,KADG,EAEH,KAFG,EAGH,KAHG,EAIH,KAJG,EAKH,KALG,EAMH,KANG,EAOH,KAPG,EAQH,KARG,EASH,KATG,EAUH,KAVG,EAWH,KAXG,EAYH,KAZG,CADO;AAedC,cAAM,CACF,SADE,EAEF,UAFE,EAGF,OAHE,EAIF,OAJE,EAKF,KALE,EAMF,MANE,EAOF,MAPE,EAQF,QARE,EASF,WATE,EAUF,SAVE,EAWF,UAXE,EAYF,UAZE;AAfQ,KAAlB;;AA+BA,QAAME,cAAc;AAChBC,WAAG;AACC;AACAzxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;;AAEA,uBAAO9O,EAAE8nB,QAAF,GAAaC,QAAb,EAAP;AACH;AAVF,SADa;AAahBX,WAAG;AACC;AACAjxB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMlX,QAAQoI,EAAE8nB,QAAF,KAAe,EAA7B;;AAEA,uBAAO,CAAClwB,UAAU,CAAV,GAAc,EAAd,GAAmBA,KAApB,EAA2BmwB,QAA3B,EAAP;AACH;AAXF,SAba;AA0BhBC,WAAG;AACC;AACA7xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKCzZ,oBAAQ,gBAACU,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIwU,WAAJ,EAAP;AACH;AACD,uBAAO,IAAP;AACH,aAVF;AAWC/jB,uBAAW,mBAACuP,GAAD,EAAS;AAChB,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMlX,QAAQoI,EAAE8nB,QAAF,EAAd;;AAEA,uBAAQlwB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA1Ba;AA4ChBqwB,WAAG;AACC;AACA9xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,SAAP;AAAmB,aAJjC;;AAKCzZ,oBAAQ,gBAACU,GAAD,EAAS;AACb,oBAAIA,GAAJ,EAAS;AACL,2BAAOA,IAAIwU,WAAJ,EAAP;AAC
H;AACD,uBAAO,IAAP;AACH,aAVF;AAWC/jB,uBAAW,mBAACuP,GAAD,EAAS;AAChB,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMlX,QAAQoI,EAAE8nB,QAAF,EAAd;;AAEA,uBAAQlwB,QAAQ,EAAR,GAAa,IAAb,GAAoB,IAA5B;AACH;AAhBF,SA5Ca;AA8DhBswB,WAAG;AACC;AACA/xB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMqZ,OAAOnoB,EAAEooB,UAAF,EAAb;;AAEA,uBAAOjxB,IAAIgxB,IAAJ,CAAP;AACH;AAXF,SA9Da;AA2EhBE,WAAG;AACC;AACAlyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM9W,UAAUgI,EAAEsoB,UAAF,EAAhB;;AAEA,uBAAOnxB,IAAIa,OAAJ,CAAP;AACH;AAXF,SA3Ea;AAwFhBuwB,WAAG;AACC;AACApyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM0Z,KAAKxoB,EAAEyoB,eAAF,EAAX;;AAEA,uBAAOD,GAAGT,QAAH,EAAP;AACH;AAXF,SAxFa;AAqGhB5X,WAAG;AACC;AACAha,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQC,KAAR,CAAcjxB,IAAd,CAAmB,GAAnB,CAAX;AAAwC,aAJtD;;AAKC6X,oBAAQ/C,kBAAkB8b,kBAAlB,CAAqCI,QAAQC,KAA7C,CALT;AAMCjoB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM4Z,MAAM1oB,EAAE2oB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQC,KAAR,CAAckB,GAAd,CAAD,CAAqBX,QAArB,EAAP;AACH;AAXF,SArGa;AAkHhBa,WAAG;AACC;AACAzyB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,6BAAWN,QAAQE,IAAR,CAAalxB,IAAb,CAAkB,GAAlB,CAAX;AAAuC,aAJrD;;AAKC6X,oBAAQ/C,kBAAkB8b,kBAAlB,CAAqCI,QAAQE,IAA7C,CALT;AAMCloB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM4Z,MAAM1oB,EAAE2oB,MAAF,EAAZ;;AAEA,uBAAQpB,QAAQE,IAAR,CAAaiB,GAAb,CAAD,CAAoBX,QAApB,EAAP;AACH;AAXF,SAlHa;AA+HhBznB,WAAG;AACC;AACAnK,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kB
AAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM4Z,MAAM1oB,EAAE6oB,OAAF,EAAZ;;AAEA,uBAAOH,IAAIX,QAAJ,EAAP;AACH;AAXF,SA/Ha;AA4IhB/nB,WAAG;AACC;AACA7J,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAM4Z,MAAM1oB,EAAE6oB,OAAF,EAAZ;;AAEA,uBAAO1xB,IAAIuxB,GAAJ,CAAP;AACH;AAXF,SA5Ia;AAyJhBtY,WAAG;AACC;AACAja,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUF,KAAV,CAAgBjxB,IAAhB,CAAqB,GAArB,CAAX;AAA0C,aAJxD;;AAKC6X,oBAAQ/C,kBAAkB8b,kBAAlB,CAAqCO,UAAUF,KAA/C,CALT;AAMCjoB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMga,QAAQ9oB,EAAE+oB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUF,KAAV,CAAgBsB,KAAhB,CAAD,CAAyBf,QAAzB,EAAP;AACH;AAXF,SAzJa;AAsKhBiB,WAAG;AACC;AACA7yB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,6BAAWH,UAAUD,IAAV,CAAelxB,IAAf,CAAoB,GAApB,CAAX;AAAyC,aAJvD;;AAKC6X,oBAAQ/C,kBAAkB8b,kBAAlB,CAAqCO,UAAUD,IAA/C,CALT;AAMCloB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMga,QAAQ9oB,EAAE+oB,QAAF,EAAd;;AAEA,uBAAQrB,UAAUD,IAAV,CAAeqB,KAAf,CAAD,CAAwBf,QAAxB,EAAP;AACH;AAXF,SAtKa;AAmLhB3D,WAAG;AACC;AACAjuB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,QAAP;AAAkB,aAJhC;AAKCzZ,kBALD,kBAKSU,GALT,EAKc;AAAE,uBAAOzD,kBAAkB2b,mBAAlB,GAAwClY,GAAxC,IAA+C,CAAtD;AAA0D,aAL1E;AAMCvP,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMga,QAAQ9oB,EAAE+oB,QAAF,EAAd;;AAEA,uBAAO5xB,IAAI2xB,QAAQ,CAAZ,CAAP;AACH;AAXF,SAnLa;AAgMhBG,WAAG;AACC;AACA9yB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;AAKCzZ,kBALD,kBAKSU,GALT,EAKc;AACT,oBAAIG,eAAJ;AACA,oBAAIH,GAAJ,EAAS;AACL,wBAAMsY,IAAItY,IAAIxX,MAAd;AACAwX,0BAAMA,IAAIoa,SAAJ,CAAc9B,IAAI,CAAlB,EAAqBA,CAArB,CAAN;AACH;AACD,oBAAI3X,YAAYpE,kBAAkB2b,mBAAlB,GAAwClY,GAAxC,CAAhB;AACA,oBAAIqa,cAAc,IAAIt0B,IAAJ,EAAlB;AACA,oBAAIu0B,
cAAc1Y,KAAK2Y,KAAL,CAAYF,YAAYG,WAAZ,EAAD,GAA8B,GAAzC,CAAlB;;AAEAra,8BAAYma,WAAZ,GAA0B3Z,SAA1B;;AAEA,oBAAI2W,oBAAoBnX,MAApB,EAA4Bqa,WAA5B,KAA4CH,YAAYG,WAAZ,EAAhD,EAA2E;AACvEra,mCAAYma,cAAc,CAA1B,IAA8B3Z,SAA9B;AACH;AACD,uBAAO2W,oBAAoBnX,MAApB,EAA4Bqa,WAA5B,EAAP;AACH,aArBF;AAsBC/pB,qBAtBD,qBAsBYuP,GAtBZ,EAsBiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAIrX,OAAOuI,EAAEspB,WAAF,GAAgBvB,QAAhB,EAAX;AACA,oBAAIX,UAAJ;;AAEA,oBAAI3vB,IAAJ,EAAU;AACN2vB,wBAAI3vB,KAAKH,MAAT;AACAG,2BAAOA,KAAKyxB,SAAL,CAAe9B,IAAI,CAAnB,EAAsBA,CAAtB,CAAP;AACH;;AAED,uBAAO3vB,IAAP;AACH;AAjCF,SAhMa;AAmOhB8xB,WAAG;AACC;AACApzB,kBAAM,GAFP;AAGC4O,mBAAO,CAHR;AAIC8iB,mBAJD,qBAIY;AAAE,uBAAO,UAAP;AAAoB,aAJlC;;AAKCzZ,oBAAQ/C,kBAAkB2b,mBAAlB,EALT;AAMCznB,qBAND,qBAMYuP,GANZ,EAMiB;AACZ,oBAAM9O,IAAIomB,oBAAoBtX,GAApB,CAAV;AACA,oBAAMrX,OAAOuI,EAAEspB,WAAF,GAAgBvB,QAAhB,EAAb;;AAEA,uBAAOtwB,IAAP;AACH;AAXF;AAnOa,KAApB;;AAkPA,WAAOkwB,WAAP;AACH,CAvSD;;AAySA;;;;;;AAMAtc,kBAAkBme,mBAAlB,GAAwC,YAAY;AAChD,QAAM7B,cAActc,kBAAkBic,mBAAlB,EAApB;;AAEA,WAAO;AACHV,cAAMe,YAAYC,CADf;AAEH6B,iBAAS9B,YAAYP,CAFlB;AAGHsC,wBAAgB/B,YAAYK,CAHzB;AAIH2B,wBAAgBhC,YAAYM,CAJzB;AAKHpB,gBAAQc,YAAYO,CALjB;AAMHpB,gBAAQa,YAAYU,CANjB;AAOHuB,mBAAWjC,YAAYxX,CAPpB;AAQH0Z,kBAAUlC,YAAYiB,CARnB;AASHkB,sBAAcnC,YAAYrnB,CATvB;AAUHypB,qCAA6BpC,YAAY3nB,CAVtC;AAWHgqB,qBAAarC,YAAYvX,CAXtB;AAYH6Z,oBAAYtC,YAAYqB,CAZrB;AAaHkB,uBAAevC,YAAYvD,CAbxB;AAcH+F,oBAAYxC,YAAYsB,CAdrB;AAeHmB,mBAAWzC,YAAY4B;AAfpB,KAAP;AAiBH,CApBD;;AAsBA;;;;;;;AAOAle,kBAAkBgf,aAAlB,GAAkC,YAAY;AAC1C,QAAM1C,cAActc,kBAAkBic,mBAAlB,EAApB;AACA,QAAMgD,kBAAkB,SAAlBA,eAAkB,GAAa;AAAE;AACnC,YAAIl0B,IAAI,CAAR;AACA,YAAI0tB,YAAJ;AACA,YAAIyG,oBAAJ;AACA,YAAMnD,IAAI,UAAK9vB,MAAf;;AAEA,eAAOlB,IAAIgxB,CAAX,EAAchxB,GAAd,EAAmB;AACf0tB,sCAAW1tB,CAAX,yBAAWA,CAAX;AACA,oCAASA,CAAT,yBAASA,CAAT,GAAa;AACTm0B,8BAAczG,GAAd;AACH;AACJ;;AAED,YAAI,CAACyG,WAAL,EAAkB;AAAE,mBAAO,IAAP;AAAc;;AAElC,eAAOA,YAAY,CAAZ,EAAenc,MAAf,CAAsBmc,YAAY,CAAZ,CAAtB,CAAP;AACH,KAhBD;;AAkBA,WAAO;AACH9D,cAAM,CAACkB,YAAYsB,CAAb,EAAgBtB,YAAY4B,CAA5B
,EACFe,eADE,CADH;AAIH5D,eAAO,CAACiB,YAAYvX,CAAb,EAAgBuX,YAAYqB,CAA5B,EAA+BrB,YAAYvD,CAA3C,EACHkG,eADG,CAJJ;AAOH3D,aAAK,CAACgB,YAAYxX,CAAb,EAAgBwX,YAAYiB,CAA5B,EAA+BjB,YAAYrnB,CAA3C,EAA8CqnB,YAAY3nB,CAA1D,EACDsqB,eADC,CAPF;AAUH1D,cAAM,CAACe,YAAYC,CAAb,EAAgBD,YAAYP,CAA5B,EAA+BO,YAAYK,CAA3C,EAA8CL,YAAYM,CAA1D,EACF,UAAUuC,YAAV,EAAwBC,YAAxB,EAAsCC,SAAtC,EAAiDC,SAAjD,EAA4D;AACxD,gBAAIJ,oBAAJ;AACA,gBAAIK,eAAJ;AACA,gBAAIC,aAAJ;AACA,gBAAI/b,YAAJ;;AAEA,gBAAI2b,iBAAiBG,SAAUF,aAAaC,SAAxC,CAAJ,EAAyD;AACrD,oBAAIC,OAAO,CAAP,EAAUxc,MAAV,CAAiBwc,OAAO,CAAP,CAAjB,MAAgC,IAApC,EAA0C;AACtCC,2BAAO,IAAP;AACH;;AAEDN,8BAAcE,YAAd;AACH,aAND,MAMO,IAAIA,YAAJ,EAAkB;AACrBF,8BAAcE,YAAd;AACH,aAFM,MAEA;AACHF,8BAAcC,YAAd;AACH;;AAED,gBAAI,CAACD,WAAL,EAAkB;AAAE,uBAAO,IAAP;AAAc;;AAElCzb,kBAAMyb,YAAY,CAAZ,EAAenc,MAAf,CAAsBmc,YAAY,CAAZ,CAAtB,CAAN;AACA,gBAAIM,IAAJ,EAAU;AACN/b,uBAAO,EAAP;AACH;AACD,mBAAOA,GAAP;AACH,SA1BC,CAVH;AAsCH+X,gBAAQ,CAACc,YAAYO,CAAb,EACJoC,eADI,CAtCL;AAyCHxD,gBAAQ,CAACa,YAAYU,CAAb,EACJiC,eADI;AAzCL,KAAP;AA6CH,CAjED;;AAmEA;;;;;AAKAjf,kBAAkByf,UAAlB,GAA+B,UAAUz1B,MAAV,EAAkB;AAC7C,QAAM01B,cAAc1f,kBAAkBkb,YAAtC;AACA,QAAMoB,cAActc,kBAAkBic,mBAAlB,EAApB;AACA,QAAM0D,gBAAgBl0B,OAAOmI,IAAP,CAAY0oB,WAAZ,CAAtB;AACA,QAAMsD,aAAa,EAAnB;AACA,QAAI70B,UAAJ;AACA,QAAI80B,oBAAJ;;AAEA,WAAO,CAAC90B,IAAIf,OAAOsL,OAAP,CAAeoqB,WAAf,EAA4B30B,IAAI,CAAhC,CAAL,KAA4C,CAAnD,EAAsD;AAClD80B,sBAAc71B,OAAOe,IAAI,CAAX,CAAd;AACA,YAAI40B,cAAcrqB,OAAd,CAAsBuqB,WAAtB,MAAuC,CAAC,CAA5C,EAA+C;AAAE;AAAW;;AAE5DD,mBAAW/zB,IAAX,CAAgB;AACZ6N,mBAAO3O,CADK;AAEZgD,mBAAO8xB;AAFK,SAAhB;AAIH;;AAED,WAAOD,UAAP;AACH,CAnBD;;AAqBA;;;;;;AAMA5f,kBAAkBwF,QAAlB,GAA6B,UAAUlZ,IAAV,EAAgBtC,MAAhB,EAAwB;AACjD,QAAM81B,QAAQ/E,oBAAoBzuB,IAApB,CAAd;AACA,QAAMszB,aAAa5f,kBAAkByf,UAAlB,CAA6Bz1B,MAA7B,CAAnB;AACA,QAAMsyB,cAActc,kBAAkBic,mBAAlB,EAApB;AACA,QAAI8D,eAAepc,OAAO3Z,MAAP,CAAnB;AACA,QAAM01B,cAAc1f,kBAAkBkb,YAAtC;AACA,QAAIntB,cAAJ;AACA,QAAIiyB,qBAAJ;AACA,QAAIj1B,UAAJ;AACA,QAAIgxB,UAAJ;;AAEA,SAAKhxB,IAAI,CAAJ,EAAOgxB,IAAI6D,WAAW3zB,MAA3B,EAAmClB,IAAIgxB,C
AAvC,EAA0ChxB,GAA1C,EAA+C;AAC3CgD,gBAAQ6xB,WAAW70B,CAAX,EAAcgD,KAAtB;AACAiyB,uBAAe1D,YAAYvuB,KAAZ,EAAmBmG,SAAnB,CAA6B4rB,KAA7B,CAAf;AACAC,uBAAeA,aAAa5xB,OAAb,CAAqB,IAAIf,MAAJ,CAAWsyB,cAAc3xB,KAAzB,EAAgC,GAAhC,CAArB,EAA2DiyB,YAA3D,CAAf;AACH;;AAED,WAAOD,YAAP;AACH,CAlBD;;AAoBA;;;;;AAKA/f,kBAAkBigB,SAAlB,CAA4Br2B,KAA5B,GAAoC,UAAUs2B,aAAV,EAAyB3uB,OAAzB,EAAkC;AAClE,QAAMytB,gBAAgBhf,kBAAkBgf,aAAlB,EAAtB;AACA,QAAM/D,WAAW,KAAKkF,iBAAL,CAAuBD,aAAvB,CAAjB;AACA,QAAME,aAAapgB,kBAAkBmb,uBAArC;AACA,QAAMkF,UAAU9uB,WAAWA,QAAQ8uB,OAAnC;AACA,QAAMC,aAAa,EAAnB;AACA,QAAMxsB,OAAO,EAAb;AACA,QAAIysB,oBAAJ;AACA,QAAIC,uBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAIhd,YAAJ;AACA,QAAI1Y,UAAJ;AACA,QAAI21B,cAAJ;AACA,QAAIC,oBAAJ;AACA,QAAI5E,UAAJ;AACA,QAAInY,SAAS,EAAb;;AAEA,SAAK2c,WAAL,IAAoBvB,aAApB,EAAmC;AAC/B,YAAI,CAAC,GAAG4B,cAAH,CAAkBnsB,IAAlB,CAAuBuqB,aAAvB,EAAsCuB,WAAtC,CAAL,EAAyD;AAAE;AAAW;;AAEtEzsB,aAAK7H,MAAL,GAAc,CAAd;AACAu0B,yBAAiBxB,cAAcuB,WAAd,CAAjB;AACAE,qBAAaD,eAAe5tB,MAAf,CAAsB4tB,eAAev0B,MAAf,GAAwB,CAA9C,EAAiD,CAAjD,EAAoD,CAApD,CAAb;;AAEA,aAAKlB,IAAI,CAAJ,EAAOgxB,IAAIyE,eAAev0B,MAA/B,EAAuClB,IAAIgxB,CAA3C,EAA8ChxB,GAA9C,EAAmD;AAC/C21B,oBAAQF,eAAez1B,CAAf,CAAR;AACA0Y,kBAAMwX,SAASyF,MAAM51B,IAAf,CAAN;;AAEA,gBAAI2Y,QAAQ7N,SAAZ,EAAuB;AACnB9B,qBAAKjI,IAAL,CAAU,IAAV;AACH,aAFD,MAEO;AACHiI,qBAAKjI,IAAL,CAAU,CAAC60B,KAAD,EAAQjd,GAAR,CAAV;AACH;AACJ;;AAEDkd,sBAAcF,WAAWI,KAAX,CAAiB,IAAjB,EAAuB/sB,IAAvB,CAAd;;AAEA,YAAI,CAAC6sB,gBAAgB/qB,SAAhB,IAA6B+qB,gBAAgB,IAA9C,KAAuD,CAACN,OAA5D,EAAqE;AACjE;AACH;;AAEDC,mBAAWF,WAAWG,WAAX,CAAX,IAAsCI,WAAtC;AACH;;AAED,QAAIL,WAAWr0B,MAAX,IAAqB,KAAK60B,eAAL,CAAqBR,WAAWr0B,MAAhC,CAAzB,EACC;AACG2X,eAAOwN,OAAP,CAAekP,WAAW,CAAX,CAAf,EAA8B,CAA9B,EAAiC,CAAjC;AAAsC,KAF1C,MAGK;AACD1c,eAAOwN,OAAP,eAAkBkP,UAAlB;AACH;;AAED,WAAO1c,MAAP;AACH,CApDD;;AAsDA;;;;;AAKA5D,kBAAkBigB,SAAlB,CAA4BE,iBAA5B,GAAgD,UAAUD,aAAV,EAAyB;AACrE,QAAMl2B,SAAS,KAAKA,MAApB;AACA,QAAMsyB,cAActc,kBAAkBic,mBAAlB,EAApB;AACA,QAAMyD,cAAc1f,kBAAkBkb,YAAtC;AACA,QAAM0E,aAAa5f,kBAAkByf,UAAlB,CAA6Bz1B,MAA7B,CAAnB;AACA,QAAM+2B,W
AAW,EAAjB;;AAEA,QAAIC,4BAAJ;AACA,QAAIC,eAAJ;AACA,QAAIC,iBAAJ;AACA,QAAIC,mBAAJ;AACA,QAAIC,oBAAJ;;AAEA,QAAIrF,UAAJ;AACA,QAAIhxB,UAAJ;;AAEAq2B,kBAAczd,OAAO3Z,MAAP,CAAd;;AAEA,QAAMq3B,WAAWzB,WAAW/0B,GAAX,CAAe;AAAA,eAAO0N,IAAIxK,KAAX;AAAA,KAAf,CAAjB;AACA,QAAMuzB,mBAAmB1B,WAAW3zB,MAApC;AACA,SAAKlB,IAAIu2B,mBAAmB,CAA5B,EAA+Bv2B,KAAK,CAApC,EAAuCA,GAAvC,EAA4C;AACxCm2B,mBAAWtB,WAAW70B,CAAX,EAAc2O,KAAzB;;AAEA,YAAIwnB,WAAW,CAAX,KAAiBE,YAAYn1B,MAAZ,GAAqB,CAA1C,EAA6C;AACzC+0B,kCAAsBE,QAAtB;AACA;AACH;;AAED,YAAIF,wBAAwBprB,SAA5B,EAAuC;AACnCorB,kCAAsBI,YAAYn1B,MAAlC;AACH;;AAEDk1B,qBAAaC,YAAYvD,SAAZ,CAAsBqD,WAAW,CAAjC,EAAoCF,mBAApC,CAAb;AACAI,sBAAcA,YAAYvD,SAAZ,CAAsB,CAAtB,EAAyBqD,WAAW,CAApC,IACV9zB,OAAO4tB,MAAP,CAAcmG,UAAd,CADU,GAEVC,YAAYvD,SAAZ,CAAsBmD,mBAAtB,EAA2CI,YAAYn1B,MAAvD,CAFJ;;AAIA+0B,8BAAsBE,QAAtB;AACH;;AAED,SAAKn2B,IAAI,CAAT,EAAYA,IAAIu2B,gBAAhB,EAAkCv2B,GAAlC,EAAuC;AACnCk2B,iBAASrB,WAAW70B,CAAX,CAAT;AACAq2B,sBAAcA,YAAYjzB,OAAZ,CAAoBuxB,cAAcuB,OAAOlzB,KAAzC,EAAgDuuB,YAAY2E,OAAOlzB,KAAnB,EAA0ByuB,OAA1B,EAAhD,CAAd;AACH;;AAED,QAAM+E,gBAAgBrB,cAAcnc,KAAd,CAAoB,IAAI3W,MAAJ,CAAWg0B,WAAX,CAApB,KAAgD,EAAtE;AACAG,kBAAc7H,KAAd;;AAEA,SAAK3uB,IAAI,CAAJ,EAAOgxB,IAAIsF,SAASp1B,MAAzB,EAAiClB,IAAIgxB,CAArC,EAAwChxB,GAAxC,EAA6C;AACzCg2B,iBAASM,SAASt2B,CAAT,CAAT,IAAwBw2B,cAAcx2B,CAAd,CAAxB;AACH;AACD,WAAOg2B,QAAP;AACH,CApDD;;AAsDA;;;;;AAKA/gB,kBAAkBigB,SAAlB,CAA4Bzb,aAA5B,GAA4C,UAAU0b,aAAV,EAAyB;AACjE,QAAI5zB,OAAO,IAAX;AACA,QAAI2J,OAAOqP,QAAP,CAAgB4a,aAAhB,CAAJ,EAAoC;AAChC5zB,eAAO,IAAI9C,IAAJ,CAAS02B,aAAT,CAAP;AACH,KAFD,MAEO,IAAI,CAAC,KAAKl2B,MAAN,IAAgBR,KAAKI,KAAL,CAAWs2B,aAAX,CAApB,EAA+C;AAClD5zB,eAAO,IAAI9C,IAAJ,CAAS02B,aAAT,CAAP;AACH,KAFM,MAGF;AACD,YAAMjF,WAAW,KAAKA,QAAL,GAAgB,KAAKrxB,KAAL,CAAWs2B,aAAX,CAAjC;AACA,YAAIjF,SAAShvB,MAAb,EAAqB;AACjB,iBAAKsY,UAAL,sCAAsB/a,IAAtB,mCAA8ByxB,QAA9B;AACA3uB,mBAAO,KAAKiY,UAAZ;AACH;AACJ;AACD,WAAOjY,IAAP;AACH,CAfD;;AAiBA0T,kBAAkBigB,SAAlB,CAA4Ba,eAA5B,GAA8C,UAAShY,GAAT,EAAc;AACxD,WAAOA,QAAQ,CAAR,IAAa,KAAK9e,MAAL,CAAY+Z,KAAZ,CAAkB,MAAlB,EAA0B9X,MAA9C;A
ACH,CAFD;;AAIA;;;;;;AAMA+T,kBAAkBigB,SAAlB,CAA4Bza,QAA5B,GAAuC,UAAUxb,MAAV,EAAkBk2B,aAAlB,EAAiC;AACpE,QAAI3b,mBAAJ;;AAEA,QAAI2b,aAAJ,EAAmB;AACf3b,qBAAa,KAAKA,UAAL,GAAkB,KAAKC,aAAL,CAAmB0b,aAAnB,CAA/B;AACH,KAFD,MAEO,IAAI,EAAE3b,aAAa,KAAKA,UAApB,CAAJ,EAAqC;AACxCA,qBAAa,KAAKC,aAAL,CAAmB0b,aAAnB,CAAb;AACH;;AAED,WAAOlgB,kBAAkBwF,QAAlB,CAA2BjB,UAA3B,EAAuCva,MAAvC,CAAP;AACH,CAVD;;;;;;;;;;;;;;ACluBA;AAAA;;;;;;AAMe,yEAACqH,IAAD,EAAU;AACrB,QAAI2R,MAAM/M,OAAOgN,iBAAjB;AACA,QAAIC,MAAMjN,OAAOkN,iBAAjB;;AAEA9R,SAAK1F,OAAL,CAAa,UAACgJ,CAAD,EAAO;AAChB,YAAIA,IAAIqO,GAAR,EAAa;AACTA,kBAAMrO,CAAN;AACH;AACD,YAAIA,IAAIuO,GAAR,EAAa;AACTA,kBAAMvO,CAAN;AACH;AACJ,KAPD;;AASA,WAAO,CAACqO,GAAD,EAAME,GAAN,CAAP;AACH,CAdD,E;;;;;;;;;;;;;;;;ACNA;AACA,IAAMse,eAAe,QAArB;AACA,IAAMC,gBAAgBh2B,OAAOw0B,SAAP,CAAiBvD,QAAvC;AACA,IAAMgF,cAAc,iBAApB;AACA,IAAMC,aAAa,gBAAnB;;AAEA,SAASC,cAAT,CAAwBrpB,GAAxB,EAA6BspB,SAA7B,EAAwC;AACpC,QAAI92B,IAAI82B,UAAU51B,MAAlB;AACA,QAAI61B,SAAS,CAAC,CAAd;;AAEA,WAAO/2B,CAAP,EAAU;AACN,YAAIwN,QAAQspB,UAAU92B,CAAV,CAAZ,EAA0B;AACtB+2B,qBAAS/2B,CAAT;AACA,mBAAO+2B,MAAP;AACH;AACD/2B,aAAK,CAAL;AACH;;AAED,WAAO+2B,MAAP;AACH;;AAED,SAASrL,KAAT,CAAesL,IAAf,EAAqBC,IAArB,EAA2BC,SAA3B,EAAsCC,MAAtC,EAA8CC,MAA9C,EAAsD;AAClD,QAAIxuB,IAAJ,EACIyuB,MADJ,EAEIC,MAFJ,EAGIhvB,GAHJ,EAIIivB,IAJJ;AAKA;AACA;AACA;;AAEA,QAAI,CAACH,MAAL,EAAa;AACTD,iBAAS,CAACH,IAAD,CAAT;AACAI,iBAAS,CAACH,IAAD,CAAT;AACH,KAHD,MAIK;AACDE,eAAOr2B,IAAP,CAAYk2B,IAAZ;AACAI,eAAOt2B,IAAP,CAAYm2B,IAAZ;AACH;;AAED,QAAIA,gBAAgB91B,KAApB,EAA2B;AACvB,aAAKyH,OAAO,CAAZ,EAAeA,OAAOquB,KAAK/1B,MAA3B,EAAmC0H,QAAQ,CAA3C,EAA8C;AAC1C,gBAAI;AACAyuB,yBAASL,KAAKpuB,IAAL,CAAT;AACA0uB,yBAASL,KAAKruB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAI,QAAOotB,MAAP,yCAAOA,MAAP,OAAkBb,YAAtB,EAAoC;AAChC,oBAAI,EAAES,aAAaI,WAAWzsB,SAA1B,CAAJ,EAA0C;AACtCmsB,yBAAKpuB,IAAL,IAAa0uB,MAAb;AACH;AACJ,aAJD,MAKK;AACD,oBAAID,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,6BAASL,KAAKpuB,IAAL,IAAa0uB,kBAAkBn2B,KAAlB,GAA0B,EAA1B,GAA+B,EAArD;AACH;A
ACDo2B,uBAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,oBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,6BAASL,KAAKpuB,IAAL,IAAauuB,OAAOI,IAAP,CAAtB;AACH,iBAFD,MAGK;AACD7L,0BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ;AACJ;AACJ,KA5BD,MA6BK;AACD,aAAKxuB,IAAL,IAAaquB,IAAb,EAAmB;AACf,gBAAI;AACAI,yBAASL,KAAKpuB,IAAL,CAAT;AACA0uB,yBAASL,KAAKruB,IAAL,CAAT;AACH,aAHD,CAIA,OAAOsB,CAAP,EAAU;AACN;AACH;;AAED,gBAAIotB,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBb,YAAzC,EAAuD;AACnD;AACA;AACA;AACA;AACAnuB,sBAAMouB,cAAchtB,IAAd,CAAmB4tB,MAAnB,CAAN;AACA,oBAAIhvB,QAAQquB,WAAZ,EAAyB;AACrB,wBAAIU,WAAW,IAAX,IAAmB,QAAOA,MAAP,yCAAOA,MAAP,OAAkBZ,YAAzC,EAAuD;AACnDY,iCAASL,KAAKpuB,IAAL,IAAa,EAAtB;AACH;AACD2uB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAKpuB,IAAL,IAAauuB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXD,MAYK,IAAI9uB,QAAQsuB,UAAZ,EAAwB;AACzB,wBAAIS,WAAW,IAAX,IAAmB,EAAEA,kBAAkBl2B,KAApB,CAAvB,EAAmD;AAC/Ck2B,iCAASL,KAAKpuB,IAAL,IAAa,EAAtB;AACH;AACD2uB,2BAAOV,eAAeS,MAAf,EAAuBF,MAAvB,CAAP;AACA,wBAAIG,SAAS,CAAC,CAAd,EAAiB;AACbF,iCAASL,KAAKpuB,IAAL,IAAauuB,OAAOI,IAAP,CAAtB;AACH,qBAFD,MAGK;AACD7L,8BAAM2L,MAAN,EAAcC,MAAd,EAAsBJ,SAAtB,EAAiCC,MAAjC,EAAyCC,MAAzC;AACH;AACJ,iBAXI,MAYA;AACDJ,yBAAKpuB,IAAL,IAAa0uB,MAAb;AACH;AACJ,aAjCD,MAkCK;AACD,oBAAIJ,aAAaI,WAAWzsB,SAA5B,EAAuC;AACnC;AACH;AACDmsB,qBAAKpuB,IAAL,IAAa0uB,MAAb;AACH;AACJ;AACJ;AACD,WAAON,IAAP;AACH;;AAGD,SAASrW,OAAT,CAAkBqW,IAAlB,EAAwBC,IAAxB,EAA8BC,SAA9B,EAAyC;AACrC;AACA,QAAI,QAAOF,IAAP,yCAAOA,IAAP,OAAgBP,YAAhB,IAAgC,QAAOQ,IAAP,yCAAOA,IAAP,OAAgBR,YAApD,EAAkE;AAC9D,eAAO,IAAP;AACH;;AAED,QAAI,QAAOQ,IAAP,yCAAOA,IAAP,OAAgBR,YAAhB,IAAgCQ,SAAS,IAA7C,EAAmD;AAC/C,eAAOD,IAAP;AACH;;AAED,QAAI,QAAOA,IAAP,yCAAOA,IAAP,OAAgBP,YAApB,EAAkC;AAC9BO,eAAOC,gBAAgB91B,KAAhB,GAAwB,EAAxB,GAA6B,EAApC;AACH;AACDuqB,UAAMsL,IAAN,EAAYC,IAAZ,EAAkBC,SAAlB;AACA,WAAOF,IAAP;AACH;;;;;;;;;;;;;;;;;;;;;;;;;;;AC5ID;;AAEA;;;;;;AAMO,SAAS3vB,OAAT,CAAkBqR,GAAlB,E
AAuB;AAC1B,WAAOvX,MAAMkG,OAAN,CAAcqR,GAAd,CAAP;AACH;;AAED;;;;;;AAMO,SAAS8e,QAAT,CAAmB9e,GAAnB,EAAwB;AAC3B,WAAOA,QAAQhY,OAAOgY,GAAP,CAAf;AACH;;AAED;;;;;;AAMO,SAAS+e,QAAT,CAAmB/e,GAAnB,EAAwB;AAC3B,WAAO,OAAOA,GAAP,KAAe,QAAtB;AACH;;AAED;;;;;;AAMO,SAASoV,UAAT,CAAqBpV,GAArB,EAA0B;AAC7B,WAAO,OAAOA,GAAP,KAAe,UAAtB;AACH;;AAED;;;;;;AAMO,SAASgf,YAAT,CAAuBpxB,IAAvB,EAA6B;AAChC,wCAAW,IAAImQ,GAAJ,CAAQnQ,IAAR,CAAX;AACH;;AAEM,IAAMwP,cAAc,SAAdA,WAAc;AAAA,mBAAY,IAAIrX,IAAJ,GAAWib,OAAX,EAAZ,GAAmCY,KAAKqd,KAAL,CAAWrd,KAAKsd,MAAL,KAAgB,KAA3B,CAAnC;AAAA,CAApB;;AAEP;;;;;;;AAOO,SAAS5O,UAAT,CAAoB6O,IAApB,EAA0BC,IAA1B,EAAgC;AACnC,QAAI,CAACzwB,QAAQwwB,IAAR,CAAD,IAAkB,CAACxwB,QAAQywB,IAAR,CAAvB,EAAsC;AAClC,eAAOD,SAASC,IAAhB;AACH;;AAED,QAAID,KAAK32B,MAAL,KAAgB42B,KAAK52B,MAAzB,EAAiC;AAC7B,eAAO,KAAP;AACH;;AAED,SAAK,IAAIlB,IAAI,CAAb,EAAgBA,IAAI63B,KAAK32B,MAAzB,EAAiClB,GAAjC,EAAsC;AAClC,YAAI63B,KAAK73B,CAAL,MAAY83B,KAAK93B,CAAL,CAAhB,EAAyB;AACrB,mBAAO,KAAP;AACH;AACJ;;AAED,WAAO,IAAP;AACH;;AAED;;;;;;AAMO,SAASyY,YAAT,CAAsBC,GAAtB,EAA2B;AAC9B,WAAOA,GAAP;AACH;;AAED;;;;;;AAMO,IAAMvR,mBAAmB,SAAnBA,gBAAmB,CAACb,IAAD,EAAU;AACtC,QAAImxB,SAASnxB,IAAT,CAAJ,EAAoB;AAChB,eAAOF,iDAAUA,CAACO,OAAlB;AACH,KAFD,MAEO,IAAIU,QAAQf,IAAR,KAAiBe,QAAQf,KAAK,CAAL,CAAR,CAArB,EAAuC;AAC1C,eAAOF,iDAAUA,CAACK,OAAlB;AACH,KAFM,MAEA,IAAIY,QAAQf,IAAR,MAAkBA,KAAKpF,MAAL,KAAgB,CAAhB,IAAqBs2B,SAASlxB,KAAK,CAAL,CAAT,CAAvC,CAAJ,EAA+D;AAClE,eAAOF,iDAAUA,CAACS,SAAlB;AACH;AACD,WAAO,IAAP;AACH,CATM,C;;;;;;;;;;;;AC/FP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA;AACA;AACA;;;;;;;;;;;;;;;;;;;ACHA;;AAEA;;;;;;;;;IAQMkxB,Y;AACF,4BAAe;AAAA;;AAAA;;AACX,aAAK5yB,KAAL,GAAa,IAAIC,GAAJ,EAAb;AACA,aAAKD,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6BklB,oDAA7B;;AAEAnqB,eAAOka,OAAP,CAAe4P,gDAAf,EAAuB5pB,OAAvB,CAA+B,UAAC1C,GAAD,EAAS;AACpC,kBAAKiH,KAAL,CAAWQ,GAAX,CAAezH,IAAI,CAAJ,CAAf,EAAuBA,IAAI,CAAJ,CAAvB;AACH,SAFD;AAGH;;AAED;;;;;;;;;;;;;yCAS2B;AACvB,gBAAI,
CAAC,UAAOgD,MAAZ,EAAoB;AAChB,uBAAO,KAAKiE,KAAL,CAAWc,GAAX,CAAe,YAAf,CAAP;AACH;;AAED,gBAAI+xB,0DAAJ;;AAEA,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,qBAAK7yB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6BqyB,OAA7B;AACH,aAFD,MAEO;AACHA,0BAAUpf,OAAOof,OAAP,CAAV;AACA,oBAAIt3B,OAAOmI,IAAP,CAAY2hB,gDAAZ,EAAoBjgB,OAApB,CAA4BytB,OAA5B,MAAyC,CAAC,CAA9C,EAAiD;AAC7C,yBAAK7yB,KAAL,CAAWQ,GAAX,CAAe,YAAf,EAA6B6kB,gDAAMA,CAACwN,OAAP,CAA7B;AACH,iBAFD,MAEO;AACH,0BAAM,IAAIhxB,KAAJ,cAAqBgxB,OAArB,4BAAN;AACH;AACJ;AACD,mBAAO,IAAP;AACH;;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iCAgCUj4B,I,EAAMi4B,O,EAAS;AAAA;;AACrB,gBAAI,OAAOA,OAAP,KAAmB,UAAvB,EAAmC;AAC/B,sBAAM,IAAIhxB,KAAJ,CAAU,8BAAV,CAAN;AACH;;AAEDjH,mBAAO6Y,OAAO7Y,IAAP,CAAP;AACA,iBAAKoF,KAAL,CAAWQ,GAAX,CAAe5F,IAAf,EAAqBi4B,OAArB;;AAEA,mBAAO,YAAM;AAAE,uBAAKC,YAAL,CAAkBl4B,IAAlB;AAA0B,aAAzC;AACH;;;qCAEaA,I,EAAM;AAChB,gBAAI,KAAKoF,KAAL,CAAWa,GAAX,CAAejG,IAAf,CAAJ,EAA0B;AACtB,qBAAKoF,KAAL,CAAWY,MAAX,CAAkBhG,IAAlB;AACH;AACJ;;;gCAEQA,I,EAAM;AACX,gBAAIA,gBAAgBF,QAApB,EAA8B;AAC1B,uBAAOE,IAAP;AACH;AACD,mBAAO,KAAKoF,KAAL,CAAWc,GAAX,CAAelG,IAAf,CAAP;AACH;;;;;;AAGL,IAAMkM,eAAgB,YAAY;AAC9B,QAAI9G,QAAQ,IAAZ;;AAEA,aAASgB,QAAT,GAAqB;AACjB,YAAIhB,UAAU,IAAd,EAAoB;AAChBA,oBAAQ,IAAI4yB,YAAJ,EAAR;AACH;AACD,eAAO5yB,KAAP;AACH;AACD,WAAOgB,UAAP;AACH,CAVqB,EAAtB;;AAYe8F,2EAAf,E;;;;;;;;;;;;;;;;;;ACtHA;;AAEA;;;;;;;IAMMwB,K;;AAEJ;;;;;;AAME,mBAAatP,KAAb,EAAoB4c,QAApB,EAA8B5S,KAA9B,EAAqC;AAAA;;AACjC,YAAM2S,iBAAiB6J,qEAAqBA,CAACxc,KAAtB,EAA6BhK,KAA7B,CAAvB;;AAEAuC,eAAOw3B,gBAAP,CAAwB,IAAxB,EAA8B;AAC1BjT,oBAAQ;AACJkT,4BAAY,KADR;AAEJC,8BAAc,KAFV;AAGJC,0BAAU,KAHN;AAIJl6B;AAJI,aADkB;AAO1Bm6B,6BAAiB;AACbH,4BAAY,KADC;AAEbC,8BAAc,KAFD;AAGbC,0BAAU,KAHG;AAIbl6B,uBAAO2c;AAJM,aAPS;AAa1Byd,4BAAgB;AACZJ,4BAAY,KADA;AAEZC,8BAAc,KAFF;AAGZC,0BAAU,KAHE;AAIZl6B,uBAAO4c;AAJK;AAbU,SAA9B;;AAqBA,aAAK5S,KAAL,GAAaA,KAAb;AACH;;AAEH;;;;;;;;;;;AAuBA;;;;;;;mCAOc;AACR,mBAAOyQ,OAAO,KAAKza,KAAZ,CAAP;AACH;;AAEH;;;;;;;;;kCAMa;AACP,mBAAO,KAAKA,KAAZ;AACH;;;4BArCY;AACT,mBAAO,KAAK8mB,MAAZ;AACH;;AAED;;;;;;4BAGsB;AAClB,mBAAO
,KAAKqT,eAAZ;AACH;;AAED;;;;;;4BAGqB;AACjB,mBAAO,KAAKC,cAAZ;AACH;;;;;;AAwBU9qB,oEAAf,E","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n \t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n 
\t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = \"./src/index.js\");\n","export default function autoType(object) {\n for (var key in object) {\n var value = object[key].trim(), number;\n if (!value) value = null;\n else if (value === \"true\") value = true;\n else if (value === \"false\") value = false;\n else if (value === \"NaN\") value = NaN;\n else if (!isNaN(number = +value)) value = number;\n else if (/^([-+]\\d{2})?\\d{4}(-\\d{2}(-\\d{2})?)?(T\\d{2}:\\d{2}(:\\d{2}(\\.\\d{3})?)?(Z|[-+]\\d{2}:\\d{2})?)?$/.test(value)) 
value = new Date(value);\n else continue;\n object[key] = value;\n }\n return object;\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? 
\"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return 
rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? \"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","export {default as dsvFormat} from \"./dsv\";\nexport {csvParse, csvParseRows, csvFormat, csvFormatBody, csvFormatRows} from \"./csv\";\nexport {tsvParse, tsvParseRows, tsvFormat, tsvFormatBody, tsvFormatRows} from \"./tsv\";\nexport {default as autoType} from \"./autoType\";\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 
'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","import DataConverter from './model/dataConverter';\nimport { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n * Sets the given converters in the store and returns the store\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n * Registers a Converter of type DataConverter\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n * Rempves a converter from store\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport 
default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","export { default as DSVStringConverter } from './dsvStringConverter';\nexport { default as JSONConverter } from './jsonConverter';\nexport { default as DSVArrayConverter } from './dsvArrayConverter';\nexport { default as AutoDataConverter } from './autoConverter';\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import converterStore from './dataConverterStore';\nimport DataConverter from './model/dataConverter';\n\nexport { DataConverter, converterStore };\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport 
DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) 
=> {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is 
in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. 
DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. 
If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transforms data in various format tpo datamodel consumabe format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Register new type of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 
'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. 
Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order 
in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n 
sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) 
{\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) 
{\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace,\n sourceId: propagationSourceId,\n propagationSource: this },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModel, {\n config,\n propConfig\n }, this);\n 
}\n\n return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. 
This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. 
It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return 
new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = 
this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param 
{string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 
'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * FilteringMode determines if resultant DataModel should be created from selection set or rejection set.\n *\n * The following modes are available\n * - `NORMAL`: Only entries from selection set are included in the resulatant DataModel instance\n * - `INVERSE`: Only entries from rejection set are included in the resulatant DataModel instance\n * - ALL: Both the entries from selection and rejection set are returned in two different DataModel instance\n */\n\nexport { default as DataFormat } from './data-format';\nexport { default as DimensionSubtype } from './dimension-subtype';\nexport { default as MeasureSubtype } from './measure-subtype';\nexport { default as FieldType } from './field-type';\nexport { default as FilteringMode } from './filtering-mode';\nexport { default as GROUP_BY_FUNCTIONS } from './group-by-functions';\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport 
pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n 
this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * 
Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport ContinuousParser from '../parsers/continuous-parser';\nimport { calculateContinuousDomain } from '../helper';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n return calculateContinuousDomain(this.partialField.data, this.rowDiffset);\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = 
this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describes 
its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display 
name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import { rowDiffsetIterator } from '../operator/row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nexport const calculateContinuousDomain = (data, rowDiffset) => {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = data[i];\n if (datum instanceof InvalidAwareTypes) {\n 
return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n};\n","export { default as Dimension } from './dimension';\nexport { default as Measure } from './measure';\nexport { default as FieldParser } from './parsers/field-parser';\nexport { default as fieldRegistry } from './field-registry';\nexport { columnMajor } from '../utils';\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? 
`${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\nimport { calculateContinuousDomain } from '../helper';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n return calculateContinuousDomain(this.partialField.data, this.rowDiffset);\n 
}\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import { FieldType, FilteringMode, 
DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, 
lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects;\n\n const selectorHelperFn = index => selectFn(\n cachedValueObjects[index],\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n 
clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn, rowId) => {\n let key = fn(arr, data, 0, rowId);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i, rowId)}`;\n }\n return key;\n};\n\nconst keyFn = (arr, fields, idx, rowId) => {\n const field = arr[idx];\n const val = field === ROW_ID ? rowId : fields[field].internalValue;\n return val;\n};\n\nconst domainChecker = (val, domain) => {\n const domainArr = domain[0] instanceof Array ? domain : [domain];\n return domainArr.some(dom => val >= dom[0] && val <= dom[1]);\n};\n\nconst boundsChecker = {\n [MeasureSubtype.CONTINUOUS]: domainChecker,\n [DimensionSubtype.TEMPORAL]: domainChecker\n};\n\nconst isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain);\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const { filterByDim = true, filterByMeasure = false, clone = true } = config;\n const clonedModel = clone ? 
cloneWithAllFields(model) : model;\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => (({ criteria = {} }) => {\n const { identifiers = [[], []], range } = criteria;\n let [fieldNames = [], values = []] = identifiers;\n const indices = fieldNames.reduce((map, name, i) => {\n map[name] = i;\n return map;\n }, {});\n fieldNames = fieldNames.filter(field => (field in modelFieldsConfig &&\n modelFieldsConfig[field].def.type === FieldType.DIMENSION) || field === ROW_ID);\n const dLen = fieldNames.length;\n const valuesMap = {};\n\n if (dLen) {\n for (let i = 1, len = identifiers.length; i < len; i++) {\n const row = identifiers[i];\n const key = `${fieldNames.map((field) => {\n const idx = indices[field];\n return row[idx];\n })}`;\n valuesMap[key] = 1;\n }\n }\n let rangeKeys = Object.keys(range || {}).filter(field => field in modelFieldsConfig);\n const hasData = values.length || rangeKeys.length;\n\n if (!filterByMeasure) {\n rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.MEASURE);\n }\n\n if (!filterByDim) {\n rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.DIMENSION);\n }\n\n return hasData ? (fields, i) => {\n let present = true;\n if (filterByDim) {\n present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true;\n }\n\n return rangeKeys.every((field) => {\n const val = fields[field].internalValue;\n return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype);\n }) && present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select((fields, i) => fns.every(fn => fn(fields, i)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select((fields, i) => fns.some(fn => fn(fields, i)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n 
clonedDm,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n 
case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && 
/**
 * Looks up a field by name inside a schema definition.
 *
 * @param {Array.<Object>} schema - The list of unit schema entries.
 * @param {string} field - The field name to search for.
 * @return {Object|null} Returns `{ name, type, index }` — where `type` is the
 *                       entry's subtype when present, falling back to its
 *                       type — or null when no entry matches.
 */
export const fieldInSchema = (schema, field) => {
    for (let idx = 0; idx < schema.length; idx++) {
        const unit = schema[idx];
        if (unit.name === field) {
            return {
                name: field,
                type: unit.subtype || unit.type,
                index: idx
            };
        }
    }
    return null;
};
/**
 * Walks up the parent chain of a DataModel and returns the root instance
 * (the one without a `_parent`).
 *
 * @param {DataModel} model - The starting DataModel instance.
 * @return {DataModel} Returns the top-most ancestor.
 */
export const getRootDataModel = (model) => {
    let current = model;
    while (current._parent) {
        current = current._parent;
    }
    return current;
};
{\n const { criteria: crit } = conf;\n let groupedModel;\n\n if (crit !== null && crit.fields.some(d => d.type === FieldType.MEASURE)) {\n groupedModel = getRootGroupByModel(model);\n }\n return Object.assign({}, conf, {\n groupedModel\n });\n };\n\n let criterias = [];\n\n if (identifiers === null) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(addGroupedModel);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria.map(addGroupedModel), {\n criteria: identifiers,\n groupedModel: identifiers !== null && identifiers.fields.some(d => d.type === FieldType.MEASURE) ?\n getRootGroupByModel(propagationInf.propagationSource) : null\n }]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n criterias.forEach((inf) => {\n const { criteria: crit } = inf;\n const propagationModel = filterPropagationModel(rootModel, crit, {\n filterByMeasure: 
/**
 * Registers (or de-registers) a propagation action in the propagation
 * namespace.
 *
 * Mutable and immutable actions live in separate registries; an entry is
 * keyed by `<action>-<sourceId>` so a source overwrites its own previous
 * action of the same kind. Passing `criteria: null` removes the entry.
 *
 * @param {Object} propagationNameSpace - Namespace holding the
 *                 `mutableActions` and `immutableActions` registries.
 * @param {Object} [config={}] - The action configuration; reads `action`,
 *                 `sourceId`, `isMutableAction` and `criteria`.
 * @param {DataModel} model - The model the action originated from.
 */
export const addToPropNamespace = (propagationNameSpace, config = {}, model) => {
    const { isMutableAction, criteria } = config;
    const key = `${config.action}-${config.sourceId}`;
    const sourceNamespace = isMutableAction
        ? propagationNameSpace.mutableActions
        : propagationNameSpace.immutableActions;

    if (criteria === null) {
        delete sourceNamespace[key];
    } else {
        sourceNamespace[key] = {
            model,
            config
        };
    }
    // Fix: previously ended with `return this;`, but `this` is undefined in a
    // module-scope arrow function, so callers always received undefined. The
    // function is used for its side effect only; no value is returned.
};
/**
 * Get the numberFormatted value if numberFormat present,
 * else returns the supplied value.
 * @param {Object} field Field Instance
 * @param {Number|String} value
 * @return {Number|String}
 */
export const getNumberFormattedVal = (field, value) => (
    field.numberFormat ? field.numberFormat()(value) : value
);
/**
 * Finds the bucket range that contains a value, via binary search over the
 * sorted, non-overlapping `bucketRanges` array.
 *
 * @param {Array.<Object>} bucketRanges - Sorted ranges `{ start, end }`;
 *                         start is inclusive, end is exclusive.
 * @param {number} value - The value to locate.
 * @return {Object|null} Returns the matching range, or null when the value
 *                       falls outside every range.
 */
const findBucketRange = (bucketRanges, value) => {
    let lo = 0;
    let hi = bucketRanges.length - 1;

    while (lo <= hi) {
        const mid = lo + Math.floor((hi - lo) / 2);
        const candidate = bucketRanges[mid];

        if (value >= candidate.start && value < candidate.end) {
            return candidate;
        } else if (value >= candidate.end) {
            lo = mid + 1;
        } else if (value < candidate.start) {
            hi = mid - 1;
        }
    }

    return null;
};
[dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? (dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. 
/**
 * Functional (curried) form of the selection operator: captures the selection
 * predicate and configuration, and returns a function that applies
 * `dm.select(...)` to whatever DataModel instance it is given.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {SelectionPredicate} selectFn - Predicate called per row.
 * @param {Object} [config] - Selection configuration (`mode`).
 * @return {PreparatorFunction} Function expecting a DataModel instance.
 */
export const select = function (...selectArgs) {
    return dm => dm.select(...selectArgs);
};
/**
 * Functional (curried) form of the projection operator: captures the field
 * list and configuration, and returns a function that applies
 * `dm.project(...)` to whatever DataModel instance it is given.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {Array} projField - Column names (strings or regular expressions).
 * @param {Object} [config] - Projection configuration (`mode`).
 * @return {PreparatorFunction} Function expecting a DataModel instance.
 */
export const project = function (...projectArgs) {
    return dm => dm.project(...projectArgs);
};
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. 
/**
 * Functional (curried) form of the binning operator: captures the measure
 * name and bin configuration, and returns a function that applies
 * `dm.bin(...)` to whatever DataModel instance it is given.
 *
 * @public
 * @namespace DataModel
 * @module Operators
 *
 * @param {String} name - Name of the measure used to create the bin.
 * @param {Object} config - Bin creation configuration (`buckets`, `binSize`,
 *                 `binCount`, `name`).
 * @return {PreparatorFunction} Function expecting a DataModel instance.
 */
export const bin = function (...binArgs) {
    return dm => dm.bin(...binArgs);
};
/**
 * Functional (curried) form of the groupBy operator: captures the dimension
 * list and reducer map, and returns a function that applies `dm.groupBy(...)`
 * to whatever DataModel instance it is given.
 *
 * @public
 *
 * @param {Array} fieldsArr - Names of the dimensions to group by.
 * @param {Object} [reducers={}] - Map of variable name to reducer name.
 * @return {PreparatorFunction} Function expecting a DataModel instance.
 */
export const groupBy = function (...groupByArgs) {
    return dm => dm.groupBy(...groupByArgs);
};
/**
 * Default filter function for crossProduct: accepts every tuple pair.
 *
 * @return {boolean} Always returns true.
 */
function defaultFilterFn() {
    return true;
}
(commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === 
JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\nimport { FieldType } from '../enums';\nimport { ROW_ID } from '../constants';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. '0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n 
/**
 * Performs the set difference between two dm instances: the resultant
 * DataModel contains the tuples of `dm1` that are NOT present in `dm2`.
 *
 * Fix: this JSDoc previously documented the operation as a "union" and
 * claimed a clone of dm1 was returned when the columns differ — the code
 * actually computes dm1 - dm2 and returns null on a column mismatch. The
 * "union" wording baked into the resultant name string is preserved as-is
 * because it is runtime-visible.
 *
 * @todo Fix the conflicts between union and difference terminology here.
 *
 * @param {dm} dm1 - The first dm instance (minuend).
 * @param {dm} dm2 - The second dm instance (subtrahend).
 * @return {dm|null} Returns the newly created dm holding `dm1 - dm2`, or
 *                   null when the two column sets are not identical.
 */
export function difference (dm1, dm2) {
    const hashTable = {};
    const schema = [];
    const schemaNameArr = [];
    const data = [];
    const dm1FieldStore = dm1.getFieldspace();
    const dm2FieldStore = dm2.getFieldspace();
    const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();
    const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();
    const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;

    // The column sets must match exactly, otherwise the operation is
    // undefined and null is returned.
    if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {
        return null;
    }

    // Prepare the schema
    (dm1._colIdentifier.split(',')).forEach((fieldName) => {
        const field = dm1FieldStoreFieldObj[fieldName];
        schema.push(extend2({}, field.schema()));
        schemaNameArr.push(field.schema().name);
    });

    /**
     * Helper that hashes every tuple of a dm; when `addData` is true, tuples
     * whose hash has not been seen before are also pushed into the result.
     *
     * @param {dm} dm - The dm instance whose rows are iterated.
     * @param {Object} fieldsObj - The fieldStore object format.
     * @param {boolean} addData - If true unseen tuples are added to the data.
     */
    function prepareDataHelper(dm, fieldsObj, addData) {
        rowDiffsetIterator(dm._rowDiffset, (i) => {
            const tuple = {};
            let hashData = '';
            schemaNameArr.forEach((schemaName) => {
                const value = fieldsObj[schemaName].partialField.data[i];
                hashData += `-${value}`;
                tuple[schemaName] = value;
            });
            if (!hashTable[hashData]) {
                if (addData) { data.push(tuple); }
                hashTable[hashData] = true;
            }
        });
    }

    // dm2 rows are hashed first WITHOUT being added, so that dm1 rows already
    // present in dm2 are skipped below: the result is dm1 - dm2.
    prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);
    prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);

    return new DataModel(data, schema, { name });
}
retArr;\n}\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n 
[LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = 
defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // 
reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","export { createBinnedFieldData } from './bucket-creator';\nexport { compose, bin, select, project, groupBy as groupby } from './compose';\nexport { calculateVariable, sort } from './pure-operators';\nexport { crossProduct } from './cross-product';\nexport { dataBuilder } from './data-builder';\nexport { difference } from './difference';\nexport { getCommonSchema } from './get-common-schema';\nexport { defReducer, fnList } from './group-by-function';\nexport { groupBy, getFieldArr, getReducerObj } from './group-by';\nexport { mergeSort } from './merge-sort';\nexport { naturalJoinFilter } from './natural-join-filter-function';\nexport { naturalJoin } from './natural-join';\nexport { leftOuterJoin, rightOuterJoin, fullOuterJoin } from './outer-join';\nexport { rowDiffsetIterator } from './row-diffset-iterator';\nexport { union } from './union';\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which 
needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return 
crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. '0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 
'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? -1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 
'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = 
fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n *\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, 
schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? 
groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = 
'';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n 
this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. 
This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. `join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. 
`naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is 
filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. 
`select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n 
}\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => 
fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta 
data.\n */\n getDerivations () {\n return this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. 
This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. 
It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n 
const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = 
d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. 
The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = Object.keys(definitions);\n const occurrence = [];\n 
let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = 
tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 === regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < 
occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * Generates domain for measure field.\n *\n * @param {Array} data - The array of data.\n * @return {Array} Returns the measure domain.\n */\nexport default (data) => {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n data.forEach((d) => {\n if (d < min) {\n min = d;\n 
}\n if (d > max) {\n max = d;\n }\n });\n\n return [min, max];\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? 
[] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return 
false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","export { default as DateTimeFormatter } from './date-time-formatter';\nexport { default as columnMajor } from './column-major';\nexport { default as generateMeasureDomain } from './domain-generator';\nexport { default as extend2 } from './extend2';\nexport * from './helper';\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. 
If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. 
These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n"],"sourceRoot":""} \ No newline at end of file diff --git a/src/fields/continuous/index.js b/src/fields/continuous/index.js index a9b6688..d83ca6d 100644 --- a/src/fields/continuous/index.js +++ b/src/fields/continuous/index.js @@ -1,8 +1,7 @@ -import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { MeasureSubtype } from '../../enums'; import Measure from '../measure'; -import InvalidAwareTypes from '../../invalid-aware-types'; import ContinuousParser from '../parsers/continuous-parser'; +import { 
calculateContinuousDomain } from '../helper'; /** * Represents continuous field subtype. @@ -31,25 +30,7 @@ export default class Continuous extends Measure { * @return {Array} Returns the min and max values. */ calculateDataDomain () { - let min = Number.POSITIVE_INFINITY; - let max = Number.NEGATIVE_INFINITY; - - // here don't use this.data() as the iteration will be occurred two times on same data. - rowDiffsetIterator(this.rowDiffset, (i) => { - const datum = this.partialField.data[i]; - if (datum instanceof InvalidAwareTypes) { - return; - } - - if (datum < min) { - min = datum; - } - if (datum > max) { - max = datum; - } - }); - - return [min, max]; + return calculateContinuousDomain(this.partialField.data, this.rowDiffset); } static parser() { diff --git a/src/fields/helper.js b/src/fields/helper.js new file mode 100644 index 0000000..9ed11eb --- /dev/null +++ b/src/fields/helper.js @@ -0,0 +1,24 @@ +import { rowDiffsetIterator } from '../operator/row-diffset-iterator'; +import InvalidAwareTypes from '../invalid-aware-types'; + +export const calculateContinuousDomain = (data, rowDiffset) => { + let min = Number.POSITIVE_INFINITY; + let max = Number.NEGATIVE_INFINITY; + + // here don't use this.data() as the iteration will be occurred two times on same data. 
+ rowDiffsetIterator(rowDiffset, (i) => { + const datum = data[i]; + if (datum instanceof InvalidAwareTypes) { + return; + } + + if (datum < min) { + min = datum; + } + if (datum > max) { + max = datum; + } + }); + + return [min, max]; +}; diff --git a/src/fields/temporal/index.js b/src/fields/temporal/index.js index 0df6bbf..4fcdd2b 100644 --- a/src/fields/temporal/index.js +++ b/src/fields/temporal/index.js @@ -3,6 +3,7 @@ import Dimension from '../dimension'; import { DateTimeFormatter } from '../../utils'; import InvalidAwareTypes from '../../invalid-aware-types'; import TemporalParser from '../parsers/temporal-parser'; +import { calculateContinuousDomain } from '../helper'; /** * Represents temporal field subtype. @@ -33,20 +34,7 @@ export default class Temporal extends Dimension { * @return {Array} Returns the unique values. */ calculateDataDomain () { - const hash = new Set(); - const domain = []; - - // here don't use this.data() as the iteration will be - // occurred two times on same data. 
- rowDiffsetIterator(this.rowDiffset, (i) => { - const datum = this.partialField.data[i]; - if (!hash.has(datum)) { - hash.add(datum); - domain.push(datum); - } - }); - - return domain; + return calculateContinuousDomain(this.partialField.data, this.rowDiffset); } diff --git a/src/fields/temporal/index.spec.js b/src/fields/temporal/index.spec.js index 86afd4d..f66bf80 100644 --- a/src/fields/temporal/index.spec.js +++ b/src/fields/temporal/index.spec.js @@ -34,7 +34,6 @@ describe('Temporal', () => { it('should return the field domain', () => { const expected = [ new Date(2017, 3 - 1, 1).getTime(), - new Date(2017, 3 - 1, 2).getTime(), new Date(2017, 3 - 1, 3).getTime() ]; expect(tempField.calculateDataDomain()).to.eql(expected); @@ -49,9 +48,7 @@ describe('Temporal', () => { const expected = [ new Date(2017, 3 - 1, 2).getTime(), - new Date(2017, 3 - 1, 3).getTime(), - new Date(2019, 11 - 1, 7).getTime(), - DataModel.InvalidAwareTypes.NULL + new Date(2019, 11 - 1, 7).getTime() ]; expect(tempField.calculateDataDomain()).to.eql(expected); }); diff --git a/src/helper.js b/src/helper.js index f73a3d4..ef85a76 100644 --- a/src/helper.js +++ b/src/helper.js @@ -534,14 +534,9 @@ const getFilteredModel = (propModel, path) => { }; const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => { - const nonTraversingModel = propModelInf.nonTraversingModel; const excludeModels = propModelInf.excludeModels || []; const criterias = propModelInf.criteria; - if (dataModel === nonTraversingModel) { - return; - } - const propagate = excludeModels.length ? 
excludeModels.indexOf(dataModel) === -1 : true; propagate && dataModel.handlePropagation(propModel, config); @@ -592,6 +587,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf const filter = config.filterFn || (() => true); return filter(entry, config); }; + const addGroupedModel = ({ config: conf, model }) => { const { criteria: crit } = conf; let groupedModel; @@ -660,7 +656,9 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf criterias.forEach((inf) => { const { criteria: crit } = inf; - const propagationModel = filterPropagationModel(rootModel, crit); + const propagationModel = filterPropagationModel(rootModel, crit, { + filterByMeasure: !!crit.find(d => d.groupedModel === rootModel) + }); const path = inf.path; if (path) { diff --git a/src/index.spec.js b/src/index.spec.js index aacc022..55ccc7f 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -3,7 +3,7 @@ import { expect } from 'chai'; import { FilteringMode, DataFormat } from './enums'; -import { DM_DERIVATIVES } from './constants'; +import { DM_DERIVATIVES, ROW_ID } from './constants'; import DataModel from './index'; import pkg from '../package.json'; import InvalidAwareTypes from './invalid-aware-types'; @@ -407,7 +407,7 @@ describe('DataModel', () => { schema: [ { name: 'name', type: 'dimension', subtype: 'categorical' }, { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' }, - { name: 'uid', type: 'identifier' } + { name: ROW_ID, type: 'dimension' } ], uids: [0, 1, 2] }; @@ -1935,12 +1935,14 @@ describe('DataModel', () => { let projected; let selected; let grouped; + let groupedChild; beforeEach(() => { dataModel = new DataModel(data1, schema1); projected = dataModel.project(['profit']); selected = dataModel.select(fields => fields.profit.valueOf() > 10); grouped = dataModel.groupBy(['first']); + groupedChild = grouped.select(() => true); // setup listeners projected.on('propagation', () => { 
projectionFlag = true; @@ -2042,6 +2044,27 @@ describe('DataModel', () => { projectionFlag && selectionFlag && groupByFlag ).to.be.true; }); + + it('Should propagate when measures are present in criteria', () => { + groupedChild.propagate(propModel1, { + action: 'highlight', + isMutableAction: true, + sourceId: 'canvas-1', + applyOnSource: false, + propagateToSource: true, + criteria: propModel1 + }, true); + + groupedChild.propagate(propModel1, { + action: 'highlight', + isMutableAction: true, + sourceId: 'canvas-2', + applyOnSource: false, + propagateToSource: true, + criteria: propModel1 + }, true); + expect(projectionFlag && selectionFlag && groupByFlag).to.be.true; + }); }); describe('#getUIDs', () => { diff --git a/src/operator/data-builder.js b/src/operator/data-builder.js index 4e4b84d..d2238fa 100644 --- a/src/operator/data-builder.js +++ b/src/operator/data-builder.js @@ -1,5 +1,7 @@ import { rowDiffsetIterator } from './row-diffset-iterator'; import { sortData } from './sort'; +import { FieldType } from '../enums'; +import { ROW_ID } from '../constants'; /** * Builds the actual data array. 
@@ -48,8 +50,8 @@ export function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetai if (addUid) { retObj.schema.push({ - name: 'uid', - type: 'identifier' + name: ROW_ID, + type: FieldType.DIMENSION }); } From 58ec694d3d842761ed42ff18ec50c5d746c4f1ea Mon Sep 17 00:00:00 2001 From: adarshlilha Date: Mon, 2 Dec 2019 18:44:45 +0530 Subject: [PATCH 20/20] Make a build and bump version --- checklist.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/checklist.txt b/checklist.txt index d4db9f1..4b9edba 100644 --- a/checklist.txt +++ b/checklist.txt @@ -11,7 +11,7 @@ Checklist to release a new build to NPM: * Now, create a release-ready build, Run: npm run build * Test the dist/* files if needed * Now commit all the changes with this message: "Make a build and bump version" - * Then finish the release, Run: git flow finish release [-s] and enter release notes + * Then finish the release, Run: git flow release finish [-s] and enter release notes * Push all changes and tags to remote, Run: git push origin master && git push origin develop && git push origin --tags * Edit the title of the released tag in Github * When everything is fine, it's ready to release