diff --git a/checklist.txt b/checklist.txt
new file mode 100644
index 0000000..4b9edba
--- /dev/null
+++ b/checklist.txt
@@ -0,0 +1,19 @@
+Checklist to release a new build to NPM:
+ * Clone the repository if needed
+ * Checkout the develop branch and make sure it is up to date with remote
+ * If git flow is not initialized yet, Run: git flow init
+ * Create a new release branch, Run: git flow release start <version>
+ * Delete all existing npm dependencies, Run: rm -rf node_modules package-lock.json
+ * Install dependencies again, Run: npm install
+ * Update the package version in package.json with the new release version
+ * Update the LICENSE file if needed
+ * Update the README.md file if needed
+ * Now, create a release-ready build, Run: npm run build
+ * Test the dist/* files if needed
+ * Now commit all the changes with this message: "Make a build and bump version"
+ * Then finish the release, Run: git flow release finish [-s] and enter release notes
+ * Push all changes and tags to remote, Run: git push origin master && git push origin develop && git push origin --tags
+ * Edit the title of the released tag in GitHub
+ * When everything looks fine, the build is ready to release
+ * Checkout the master branch
+ * Now, if everything is fine, release it to npm, Run: npm publish
\ No newline at end of file
diff --git a/dist/datamodel.js b/dist/datamodel.js
index e351b81..6d220a4 100644
--- a/dist/datamodel.js
+++ b/dist/datamodel.js
@@ -1,2 +1,2 @@
-!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define("DataModel",[],t):"object"==typeof exports?exports.DataModel=t():e.DataModel=t()}(window,(function(){return function(e){var t={};function n(r){if(t[r])return t[r].exports;var a=t[r]={i:r,l:!1,exports:{}};return e[r].call(a.exports,a,a.exports,n),a.l=!0,a.exports}return n.m=e,n.c=t,n.d=function(e,t,r){n.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:r})},n.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.t=function(e,t){if(1&t&&(e=n(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var a in e)n.d(r,a,function(t){return e[t]}.bind(null,a));return r},n.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return n.d(t,"a",t),t},n.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},n.p="",n(n.s=1)}([function(e){e.exports=JSON.parse('{"name":"datamodel","description":"Relational algebra compliant in-memory tabular data store","homepage":"https://github.com/chartshq/datamodel","version":"2.2.3","license":"MIT","main":"dist/datamodel.js","keywords":["datamodel","data","relational","algebra","model","muze","fusioncharts","table","tabular","operation"],"author":"Muzejs.org (https://muzejs.org/)","repository":{"type":"git","url":"https://github.com/chartshq/datamodel.git"},"contributors":[{"name":"Akash Goswami","email":"akashgoswami90s@gmail.com"},{"name":"Subhash Haldar"},{"name":"Rousan Ali","email":"rousanali786@gmail.com","url":"https://rousan.io"},{"name":"Ujjal Kumar Dutta","email":"duttaujjalkumar@live.com"},{"name":"Ranajit Banerjee","email":"ranajit.113124@gmail.com"},{"name":"Adarsh 
Lilha","email":"adarshlilha@gmail.com"}],"dependencies":{"d3-dsv":"^1.0.8"},"devDependencies":{"babel-cli":"6.26.0","babel-core":"^6.26.3","babel-eslint":"6.1.2","babel-loader":"^7.1.4","babel-plugin-transform-runtime":"^6.23.0","babel-preset-env":"^1.7.0","babel-preset-es2015":"^6.24.1","babel-preset-flow":"^6.23.0","chai":"3.5.0","cross-env":"^5.0.5","eslint":"3.19.0","eslint-config-airbnb":"15.1.0","eslint-plugin-import":"2.7.0","eslint-plugin-jsx-a11y":"5.1.1","eslint-plugin-react":"7.3.0","istanbul-instrumenter-loader":"^3.0.0","jsdoc":"3.5.5","json2yaml":"^1.1.0","karma":"1.7.1","karma-chai":"0.1.0","karma-chrome-launcher":"2.1.1","karma-coverage-istanbul-reporter":"^1.3.0","karma-mocha":"1.3.0","karma-spec-reporter":"0.0.31","karma-webpack":"2.0.3","marked":"^0.5.0","mocha":"3.4.2","mocha-webpack":"0.7.0","transform-runtime":"0.0.0","webpack":"^4.12.0","webpack-cli":"^3.0.7","webpack-dev-server":"^3.1.4"},"scripts":{"test":"npm run lint && npm run ut","ut":"karma start karma.conf.js","utd":"karma start --single-run false --browsers Chrome karma.conf.js ","build":"npm run build:prod","build:dev":"webpack --mode development","build:prod":"webpack --mode production","start":"webpack-dev-server --config webpack.config.dev.js --mode development --open","lint":"eslint ./src","lint-errors":"eslint --quiet ./src","docs":"rm -rf yaml && mkdir yaml && jsdoc -c jsdoc.conf.json"}}')},function(e,t,n){var r=n(2);e.exports=r.default?r.default:r},function(e,t,n){"use strict";n.r(t);var r={};n.r(r),n.d(r,"DataFormat",(function(){return o})),n.d(r,"DimensionSubtype",(function(){return u})),n.d(r,"MeasureSubtype",(function(){return c})),n.d(r,"FieldType",(function(){return f})),n.d(r,"FilteringMode",(function(){return l})),n.d(r,"GROUP_BY_FUNCTIONS",(function(){return s}));var a={};n.r(a),n.d(a,"DSVArr",(function(){return rt})),n.d(a,"DSVStr",(function(){return mt})),n.d(a,"FlatJSON",(function(){return yt})),n.d(a,"Auto",(function(){return bt}));var i={};n.r(i),n.d(i,"sum",(function(){return un})),n.d(i,"avg",(function(){return cn})),n.d(i,"min",(function(){return fn})),n.d(i,"max",(function(){return ln})),n.d(i,"first",(function(){return sn})),n.d(i,"last",(function(){return dn})),n.d(i,"count",(function(){return pn})),n.d(i,"sd",(function(){return hn}));var o={FLAT_JSON:"FlatJSON",DSV_STR:"DSVStr",DSV_ARR:"DSVArr",AUTO:"Auto"},u={CATEGORICAL:"categorical",TEMPORAL:"temporal",GEO:"geo",BINNED:"binned"},c={CONTINUOUS:"continuous"},f={MEASURE:"measure",DIMENSION:"dimension"},l={NORMAL:"normal",INVERSE:"inverse",ALL:"all"},s={SUM:"sum",AVG:"avg",MIN:"min",MAX:"max",FIRST:"first",LAST:"last",COUNT:"count",STD:"std"};function d(e){return e instanceof Date?e:new Date(e)}function p(e){return e<10?"0"+e:e}function h(e){this.format=e,this.dtParams=void 0,this.nativeDate=void 0}RegExp.escape=function(e){return e.replace(/[-[\]{}()*+?.,\\^$|#\s]/g,"\\$&")},h.TOKEN_PREFIX="%",h.DATETIME_PARAM_SEQUENCE={YEAR:0,MONTH:1,DAY:2,HOUR:3,MINUTE:4,SECOND:5,MILLISECOND:6},h.defaultNumberParser=function(e){return function(t){var n;return isFinite(n=parseInt(t,10))?n:e}},h.defaultRangeParser=function(e,t){return function(n){var r,a=void 0;if(!n)return t;var i=n.toLowerCase();for(a=0,r=e.length;aa.getFullYear()&&(t=""+(i-1)+r),d(t).getFullYear()},formatter:function(e){var t=d(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return 
d(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[i+1],-1!==r.indexOf(o)&&a.push({index:i,token:o});return a},h.formatAs=function(e,t){var n,r=d(e),a=h.findTokens(t),i=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=a.length;l=0;d--)(f=i[d].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(d=0;d0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],a=+(n[1]||n[0]);if(a>=r)for(var i=r;i<=a;i+=1)t(i)}))}var R=function(){function e(e,t){for(var n=0;n=(i=e[a=n+Math.floor((r-n)/2)]).start&&t=i.end?n=a+1:t3&&void 0!==arguments[3]&&arguments[3],a=arguments.length>4&&void 0!==arguments[4]?arguments[4]:U.CROSS,i=[],o=[],u=n||H,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,d=c.name+"."+f.name,p=Y(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=_({},e.schema());-1===p.indexOf(t.name)||r||(t.name=c.name+"."+t.name),i.push(t)})),f.fields.forEach((function(e){var t=_({},e.schema());-1!==p.indexOf(t.name)?r||(t.name=f.name+"."+t.name,i.push(t)):i.push(t)})),T(e._rowDiffset,(function(n){var d=!1,h=void 0;T(t._rowDiffset,(function(v){var m=[],y={};y[l]={},y[s]={},c.fields.forEach((function(e){m.push(e.partialField.data[n]),y[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==p.indexOf(e.schema().name)&&r||m.push(e.partialField.data[v]),y[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var g=At(y[l]),b=At(y[s]);if(u(g,b,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var w={};m.forEach((function(e,t){w[i[t].name]=e})),d&&U.CROSS!==a?o[h]=w:(o.push(w),d=!0,h=n)}else if((a===U.LEFTOUTER||a===U.RIGHTOUTER)&&!d){var O={},_=c.fields.length-1;m.forEach((function(e,t){O[i[t].name]=t<=_?e:null})),d=!0,h=n,o.push(O)}}))})),new on(o,i,{name:d})}function J(e,t){var n=""+e,r=""+t;return nr?1:0}function z(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J;return e.length>1&&function e(t,n,r,a){if(r===n)return t;var i=n+Math.floor((r-n)/2);return e(t,n,i,a),e(t,i+1,r,a),function(e,t,n,r,a){for(var i=e,o=[],u=t;u<=r;u+=1)o[u]=i[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(i[l]=o[f],f+=1):f>r?(i[l]=o[c],c+=1):a(o[c],o[f])<=0?(i[l]=o[c],c+=1):(i[l]=o[f],f+=1)}(t,n,i,r,a),t}(e,0,e.length-1,t),e}var K=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function W(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function q(e,t){var n=new Map,r=[];return e.forEach((function(e){var 
a=e[t];n.has(a)?r[n.get(a)][1].push(e):(r.push([a,[e]]),n.set(a,r.length-1))})),r}function Z(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,a){return t[r]=e[1].map((function(e){return e[n[a].index]})),t}),r),r}function $(e,t,n){for(var r=void 0,a=void 0,i=void 0,o=n.length-1;o>=0;o--)r=n[o][0],a=n[o][1],(i=Ht(t,r))&&("function"==typeof a?z(e,(function(e,t){return a(e[i.index],t[i.index])})):E(a)?function(){var n=q(e,i.index),r=a[a.length-1],o=a.slice(0,a.length-1),u=o.map((function(e){return Ht(t,e)}));n.forEach((function(e){e.push(Z(e,o,u))})),z(n,(function(e,t){var n=e[2],a=t[2];return r(n,a)})),e.length=0,n.forEach((function(t){e.push.apply(e,W(t[1]))}))}():function(){var t=X(i,a);z(e,(function(e,n){return t(e[i.index],n[i.index])}))}())}var Q,ee=function e(t,n,r,a){if(0===t.length)return n;var i=t[0],o=new Map;n.reduce((function(e,t){var n=t[i.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var d=l.value,p=K(d,2),h=p[0],v=p[1],m=e(t.slice(1),v,r,a);o.set(h,m),Array.isArray(m)&&$(m,r,a)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function te(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!Ht(n,e[0])}))).length){var a=t.findIndex((function(e){return null===e[1]}));a=-1!==a?a:t.length;var i=t.slice(0,a),o=t.slice(a);$(r,n,i),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return Ht(t,e)}));var a=ee(r,e,t,n);return e.map((function(e){for(var t=0,n=a;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,i.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function ne(e,t,n,r,a){a=Object.assign({},{addUid:!1,columnWise:!1},a);var i={schema:[],data:[],uids:[]},o=a.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),a=_e.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var i=_e.resolve(t[e]);i?n[e]=i:(n[e]=a,t[e]=ge)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],d=[],p={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=Ee(e,2),n=t[0],r=t[1];if(-1!==a.indexOf(n)||i[n])switch(d.push(_({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var m=0;T(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===p[t]?(p[t]=m,h.push({}),l.forEach((function(t){h[m][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[m][t]=[u[t].partialField.data[e]]})),m+=1):s.forEach((function(n){h[p[t]][n].push(u[n].partialField.data[e])}))}));var y={},g=function(){return e.detachedRoot()};return h.forEach((function(e){var t=e;s.forEach((function(n){t[n]=i[n](e[n],g,y)}))})),r?(r.__calculateFieldspace(),v=r):v=new yn(h,d,{name:c}),v}function je(e,t){var n=Y(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function Se(e,t){var n={},r=[],a=[],i=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!j(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){T(e._rowDiffset,(function(e){var r={},o="";a.forEach((function(n){var 
a=t[n].partialField.data[e];o+="-"+a,r[n]=a})),n[o]||(i.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(_({},t.schema())),a.push(t.schema().name)})),s(e,c),s(t,f),new yn(i,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,U.LEFTOUTER)}function Fe(e,t,n){return G(t,e,n,!1,U.RIGHTOUTER)}var Ne=function(){function e(e,t){for(var n=0;nn&&(n=a))})),[t,n]}}]),t}(Ve),He=function(){function e(e,t){for(var n=0;n9999?"+"+st(t,6):st(t,4))+"-"+st(e.getUTCMonth()+1,2)+"-"+st(e.getUTCDate(),2)+(i?"T"+st(n,2)+":"+st(r,2)+":"+st(a,2)+"."+st(i,3)+"Z":a?"T"+st(n,2)+":"+st(r,2)+":"+st(a,2)+"Z":r||n?"T"+st(n,2)+":"+st(r,2)+"Z":"")}var pt=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,a=[],i=e.length,o=0,u=0,c=i<=0,f=!1;function l(){if(c)return it;if(f)return f=!1,at;var t,r,a=o;if(e.charCodeAt(a)===ot){for(;o++=i?c=!0:(r=e.charCodeAt(o++))===ut?f=!0:r===ct&&(f=!0,e.charCodeAt(o)===ut&&++o),e.slice(a+1,t-1).replace(/""/g,'"')}for(;o2&&void 0!==arguments[2]?arguments[2]:{},a=arguments[3];t===L.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,_t(a))):e._derivation.push({op:t,meta:r,criteria:a})},Dt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,_t(e._ancestorDerivation).concat(_t(e._derivation)))},Ft=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=arguments[4];St(t,n,r,a),Dt(e,t)},Nt=(Ot(gt={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Ot(gt,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Ot(gt,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),gt),kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Tt=function(e,t,n){var r=[],a=[],i=wt(Nt[n].calcDiff,2),o=i[0],u=i[1];return T(e,(function(e){var n=t(e);n&&o&&kt(r,e,-1),!n&&u&&kt(a,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:a.join(",")}},Rt=function(e,t,n,r,a){var i={},o={},u={};return T(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=a[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],i[n]=-1,u[n]=c),kt(o[n],e,i[n]),i[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},Ct=function(e,t,n,r,a){var i={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e.getPartialFieldspace().fields,l=f.map((function(e){return e.formattedData()})),s=f.map((function(e){return e.data()}));return a(c,(function(e){return t(Et(f,l,s,e),e,o,i)}),u)},Mt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},It=function(e,t,n){for(var r=n(e,t,0),a=1,i=e.length;a2&&void 0!==arguments[2]?arguments[2]:{},r=[],a=n.operation||V,i=n.filterByMeasure||!1,o=Mt(e),u=o.getFieldsConfig();r=t.length?t.map((function(e){return n=void 0,r=(t=e).getData(),a=t.getFieldsConfig(),o=Object.keys(t.getFieldspace().getDimension()).filter((function(e){return e in u})),c=o.length,f=o.map((function(e){return a[e].index})),l=Object.keys(t.getFieldspace().getMeasure()).filter((function(e){return e in u})),s=t.getFieldspace().fieldsObj(),d=r.data,p=l.reduce((function(e,t){return e[t]=s[t].domain(),e}),{}),h={},n=function(e,t,n){return t[e[n]]},c&&d.forEach((function(e){var t=It(f,e,n);h[t]=1})),n=function(e,t,n){return t[e[n]].internalValue},d.length?function(e){var t=!c||h[It(o,e,n)];return i?l.every((function(t){return 
e[t].internalValue>=p[t][0]&&e[t].internalValue<=p[t][1]}))&&t:t}:function(){return!1};var t,n,r,a,o,c,f,l,s,d,p,h})):[function(){return!1}];return a===V?o.select((function(e){return r.every((function(t){return t(e)}))}),{saveChild:!1}):o.select((function(e){return r.some((function(t){return t(e)}))}),{saveChild:!1})},xt=function(e,t,n,r,a){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Ft(n,e,L.SELECT,{config:r},a)},Lt=function(e,t,n,r){var a=e.clone(n.saveChild),i=t;return n.mode===l.INVERSE&&(i=r.filter((function(e){return-1===t.indexOf(e)}))),a._colIdentifier=i.join(","),a.__calculateFieldspace().calculateFieldsConfig(),Ft(e,a,L.PROJECT,{projField:t,config:n,actualProjField:i},null),a},Ut=function(e,t,n,r){return t.map((function(t){return Lt(e,t,n,r)}))},Vt=function(e){if((e=_({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},Bt=function(e){return e.map((function(e){return function(e){var t=[c.CONTINUOUS],n=[u.CATEGORICAL,u.BINNED,u.TEMPORAL,u.GEO],r=e.type,a=e.subtype,i=e.name;switch(r){case f.DIMENSION:if(-1===n.indexOf(a))throw new Error("DataModel doesn't support dimension field subtype "+a+" used for "+i+" field");break;case f.MEASURE:if(-1===t.indexOf(a))throw new Error("DataModel doesn't support measure field subtype "+a+" used for "+i+" field");break;default:throw new Error("DataModel doesn't support field type "+r+" used for "+i+" field")}}(e=Vt(e)),e}))},Yt=function(e,t,n,r){n=Bt(n),r=Object.assign(Object.assign({},nt),r);var i=a[r.dataFormat];if(!i||"function"!=typeof i)throw new Error("No converter function found for "+r.dataFormat+" format");var u=i(t,n,r),c=wt(u,2),f=c[0],l=c[1];!function(e,t){e.forEach((function(e){var n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,f);var s=tt(l,n,f),d=F.createNamespace(s,r.name);e._partialFieldspace=d,e._rowDiffset=l.length&&l[0].length?"0-"+(l[0].length-1):"";var p=[],h=d.fields,v=h.map((function(e){return e.data()})),m=h.map((function(e){return e.formattedData()}));return T(e._rowDiffset,(function(e){p[e]=Et(h,m,v,e)})),d._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?D(t):r.dataFormat,e},Ht=function(e,t){for(var n=0;n2&&void 0!==arguments[2]?arguments[2]:{},a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=a.nonTraversingModel,o=a.excludeModels||[];if(t!==i){var u=!o.length||-1===o.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var i=Gt(n,t);e(t,i,r,a)}))}},zt=function(e){for(;e._parent&&e._derivation.find((function(e){return e.op!==L.GROUPBY}));)e=e._parent;return e},Kt=function(e){for(;e._parent;)e=e._parent;return e},Wt=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},Xt=function(e,t,n,r){var a=void 0,i=void 0,o=n.propagationNameSpace,u=n.propagateToSource,c=n.sourceId,f=r.propagateInterpolatedValues,l=[];if(null===e&&!0!==r.persistent)l=[{criteria:[]}],a=[];else{var s,d=Object.values(o.mutableActions);!1!==u&&(d=d.filter((function(e){return e.config.sourceId!==c})));var p=d.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})).map((function(e){return e.config.criteria})),h=[];if(!1!==u){var v=Object.values(o.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==c&&(h.push(e.model),(a=v.filter((function(t){return 
t!==e})).map((function(e){return e.config.criteria}))).length&&l.push({criteria:a,models:e.model,path:Wt(e.model)}))}))}a=(s=[]).concat.apply(s,[].concat(_t(p),[e])).filter((function(e){return null!==e})),l.push({criteria:a,excludeModels:[].concat(h,_t(r.excludeModels||[]))})}var m=t.model,y=Object.assign({sourceIdentifiers:e,propagationSourceId:c},r),g=t.groupByModel;f&&g&&(i=Pt(g,a,{filterByMeasure:f}),Jt(g,i,y)),l.forEach((function(e){var t=Pt(m,e.criteria),n=e.path;if(n){var r=function(e,t){for(var n=0,r=t.length;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,a=t.isMutableAction,i=t.criteria,o=t.action+"-"+t.sourceId;r=a?e.mutableActions:e.immutableActions,null===i?delete r[o]:r[o]={model:n,config:t}},$t=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,_t(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},Qt=function(e,t){return e.numberFormat?e.numberFormat()(t):t},en=function(){function e(e,t){for(var n=0;n1?(a=e.clone(r.saveChild),xt(a,u[c[1]],e,n,t),[o,a]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),a=Object.keys(r),i=t.mode,o=$t(e,a,r),u=void 0;i===l.ALL?u=[Lt(this,o,{mode:l.NORMAL,saveChild:t.saveChild},a),Lt(this,o,{mode:l.INVERSE,saveChild:t.saveChild},a)]:u=Lt(this,o,t,a);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),nn=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,a=!1,i=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){a=!0,i=e}finally{try{!r&&u.return&&u.return()}finally{if(a)throw i}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},rn=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),a=[this,e,t],i=Ae.apply(void 0,a);return Ft(this,i,L.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:_e.defaultReducer()},t),n.saveChild?i.setParent(this):i.setParent(null),i}},{key:"sort",value:function(e){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),a=[r].concat(n.data),i=new this.constructor(a,n.schema,{dataFormat:"DSVArr"});return Ft(this,i,L.SORT,t,e),t.saveChild?i.setParent(this):i.setParent(null),i}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),a=r[0].length,i=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(i=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new k(a[n],i[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=Vt(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var a=this.getFieldsConfig(),i=t.slice(0,t.length-1),o=t[t.length-1];if(a[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in datamodel");var u=i.map((function(e){var t=a[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},d=function(){return r.detachedRoot()},p=[];T(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));p[e]=o.apply(void 0,an(t).concat([e,d,s]))}));var h=tt([p],[e],[e.name]),v=nn(h,1)[0];return c.addField(v),Ft(this,c,L.CAL_VAR,{config:e,fields:i},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=t.isMutableAction,i=t.sourceId,o=t.payload,u=Kt(this),c=u._propagationNameSpace,f=zt(this),l={groupByModel:f,model:u};return n&&Zt(c,t,this),Xt(e,l,{propagationNameSpace:c,sourceId:i},Object.assign({payload:o},t)),a&&qt(c,l,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var a=function(e,t,n){var r=n.buckets,a=n.binsCount,i=n.binSize,o=n.start,u=n.end,c=e.domain(),f=I(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var d=[],p=0;p2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],a=r.saveChild,i=e.getFieldspace().fieldsObj(),o=Ct(e.clone(a),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},a=this.getFieldsConfig(),i=Object.keys(a),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=$t([].concat(an(e),an(t)),i,a)})),Ut(this,o,n,i)}}],[{key:"configureInvalidAwareTypes",value:function(e){return M.invalidAwareVals(e)}},{key:"Reducers",get:function(){return _e}}]),t}(tn),un=ye.sum,cn=ye.avg,fn=ye.min,ln=ye.max,sn=ye.first,dn=ye.last,pn=ye.count,hn=ye.std,vn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 
0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,a=void 0,i=[];return t.forEach((function(e){r=e(r),i.push.apply(i,B(r._derivation)),a||(a=r)})),a&&a!==r&&a.dispose(),r._ancestorDerivation=[],Ft(e,r,L.COMPOSE,null,i),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;ni.getFullYear()&&(t=""+(a-1)+r),p(t).getFullYear()},formatter:function(e){var t=p(e).getFullYear().toString(),n=void 0;return t&&(n=t.length,t=t.substring(n-2,n)),t}},Y:{name:"Y",index:0,extract:function(){return"(\\d{4})"},parser:h.defaultNumberParser(),formatter:function(e){return p(e).getFullYear().toString()}}}},h.getTokenFormalNames=function(){var e=h.getTokenDefinitions();return{HOUR:e.H,HOUR_12:e.l,AMPM_UPPERCASE:e.p,AMPM_LOWERCASE:e.P,MINUTE:e.M,SECOND:e.S,SHORT_DAY:e.a,LONG_DAY:e.A,DAY_OF_MONTH:e.e,DAY_OF_MONTH_CONSTANT_WIDTH:e.d,SHORT_MONTH:e.b,LONG_MONTH:e.B,MONTH_OF_YEAR:e.m,SHORT_YEAR:e.y,LONG_YEAR:e.Y}},h.tokenResolver=function(){var e=h.getTokenDefinitions(),t=function(){for(var e=0,t=void 0,n=void 0,r=arguments.length;e=0;)o=e[a+1],-1!==r.indexOf(o)&&i.push({index:a,token:o});return i},h.formatAs=function(e,t){var n,r=p(e),i=h.findTokens(t),a=h.getTokenDefinitions(),o=String(t),u=h.TOKEN_PREFIX,c=void 0,f=void 0,l=void 0;for(l=0,n=i.length;l=0;p--)(f=a[p].index)+1!==s.length-1?(void 0===u&&(u=s.length),l=s.substring(f+2,u),s=s.substring(0,f+2)+RegExp.escape(l)+s.substring(u,s.length),u=f):u=f;for(p=0;p0&&e.split(",").forEach((function(e){var n=e.split("-"),r=+n[0],i=+(n[1]||n[0]);if(i>=r)for(var a=r;a<=i;a+=1)t(a)}))}var R=function(){function e(e,t){for(var n=0;n=(a=e[i=n+Math.floor((r-n)/2)]).start&&t=a.end?n=i+1:t3&&void 0!==arguments[3]&&arguments[3],i=arguments.length>4&&void 0!==arguments[4]?arguments[4]:V.CROSS,a=[],o=[],u=n||J,c=e.getFieldspace(),f=t.getFieldspace(),l=c.name,s=f.name,p=c.name+"."+f.name,d=H(c,f);if(l===s)throw new Error("DataModels must have different alias names");return c.fields.forEach((function(e){var t=O({},e.schema());-1===d.indexOf(t.name)||r||(t.name=c.name+"."+t.name),a.push(t)})),f.fields.forEach((function(e){var t=O({},e.schema());-1!==d.indexOf(t.name)?r||(t.name=f.name+"."+t.name,a.push(t)):a.push(t)})),N(e._rowDiffset,(function(n){var p=!1,h=void 0;N(t._rowDiffset,(function(v){var y=[],m={};m[l]={},m[s]={},c.fields.forEach((function(e){y.push(e.partialField.data[n]),m[l][e.name()]={rawValue:e.partialField.data[n],formattedValue:e.formattedData()[n]}})),f.fields.forEach((function(e){-1!==d.indexOf(e.schema().name)&&r||y.push(e.partialField.data[v]),m[s][e.name()]={rawValue:e.partialField.data[v],formattedValue:e.formattedData()[v]}}));var b=Bt(m[l]),g=Bt(m[s]);if(u(b,g,(function(){return e.detachedRoot()}),(function(){return t.detachedRoot()}),{})){var _={};y.forEach((function(e,t){_[a[t].name]=e})),p&&V.CROSS!==i?o[h]=_:(o.push(_),p=!0,h=n)}else if((i===V.LEFTOUTER||i===V.RIGHTOUTER)&&!p){var w={},O=c.fields.length-1;y.forEach((function(e,t){w[a[t].name]=t<=O?e:null})),p=!0,h=n,o.push(w)}}))})),new Sn(o,a,{name:p})}function z(e,t){var n=""+e,r=""+t;return nr?1:0}function K(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:z;return e.length>1&&function e(t,n,r,i){if(r===n)return t;var a=n+Math.floor((r-n)/2);return e(t,n,a,i),e(t,a+1,r,i),function(e,t,n,r,i){for(var a=e,o=[],u=t;u<=r;u+=1)o[u]=a[u];for(var c=t,f=n+1,l=t;l<=r;l+=1)c>n?(a[l]=o[f],f+=1):f>r?(a[l]=o[c],c+=1):i(o[c],o[f])<=0?(a[l]=o[c],c+=1):(a[l]=o[f],f+=1)}(t,n,a,r,i),t}(e,0,e.length-1,t),e}var W=function(e,t){if(Array.isArray(e))return 
e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function X(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);tt?1:-1}:function(e,t){return(e=""+e)===(t=""+t)?0:e>t?-1:1}}return n}(e.type,n)}function Z(e,t){var n=new Map,r=[];return e.forEach((function(e){var i=e[t];n.has(i)?r[n.get(i)][1].push(e):(r.push([i,[e]]),n.set(i,r.length-1))})),r}function $(e,t,n){var r={label:e[0]};return t.reduce((function(t,r,i){return t[r]=e[1].map((function(e){return e[n[i].index]})),t}),r),r}function Q(e,t,n){for(var r=void 0,i=void 0,a=void 0,o=n.length-1;o>=0;o--)r=n[o][0],i=n[o][1],(a=sn(t,r))&&("function"==typeof i?K(e,(function(e,t){return i(e[a.index],t[a.index])})):E(i)?function(){var n=Z(e,a.index),r=i[i.length-1],o=i.slice(0,i.length-1),u=o.map((function(e){return sn(t,e)}));n.forEach((function(e){e.push($(e,o,u))})),K(n,(function(e,t){var n=e[2],i=t[2];return r(n,i)})),e.length=0,n.forEach((function(t){e.push.apply(e,X(t[1]))}))}():function(){var t=q(a,i);K(e,(function(e,n){return t(e[a.index],n[a.index])}))}())}var ee,te=function e(t,n,r,i){if(0===t.length)return n;var a=t[0],o=new Map;n.reduce((function(e,t){var n=t[a.index];return e.has(n)?e.get(n).push(t):e.set(n,[t]),e}),o);var u=!0,c=!1,f=void 0;try{for(var l,s=o[Symbol.iterator]();!(u=(l=s.next()).done);u=!0){var p=l.value,d=W(p,2),h=d[0],v=d[1],y=e(t.slice(1),v,r,i);o.set(h,y),Array.isArray(y)&&Q(y,r,i)}}catch(e){c=!0,f=e}finally{try{!u&&s.return&&s.return()}finally{if(c)throw f}}return o};function ne(e,t){var n=e.schema,r=e.data;if(0!==(t=t.filter((function(e){return!!sn(n,e[0])}))).length){var i=t.findIndex((function(e){return null===e[1]}));i=-1!==i?i:t.length;var a=t.slice(0,i),o=t.slice(i);Q(r,n,a),r=function(e,t,n,r){if(0===(n=n.filter((function(e){return null!==e[1]||(r.push(e[0]),!1)}))).length)return e;r=r.map((function(e){return sn(t,e)}));var i=te(r,e,t,n);return e.map((function(e){for(var t=0,n=i;!Array.isArray(n);)n=n.get(e[r[t++].index]);return n.shift()}))}(r,n,o,a.map((function(e){return e[0]}))),e.uids=r.map((function(e){return e.pop()})),e.data=r}}function re(e,t,n,r,i){i=Object.assign({},{addUid:!1,columnWise:!1},i);var a={schema:[],data:[],uids:[]},o=i.addUid,u=r&&r.length>0,c=[];if(n.split(",").forEach((function(t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n={},r=e.getFieldspace().getMeasure(),i=Ee.defaultReducer();return Object.keys(r).forEach((function(e){"string"!=typeof t[e]&&(t[e]=r[e].defAggFn());var a=Ee.resolve(t[e]);a?n[e]=a:(n[e]=i,t[e]=ge)})),n}(e,n),o=e.getFieldspace(),u=o.fieldsObj(),c=o.name,l=[],s=[],p=[],d={},h=[],v=void 0;Object.entries(u).forEach((function(e){var t=je(e,2),n=t[0],r=t[1];if(-1!==i.indexOf(n)||a[n])switch(p.push(O({},r.schema())),r.schema().type){case f.MEASURE:s.push(n);break;default:case f.DIMENSION:l.push(n)}}));var y=0;N(e._rowDiffset,(function(e){var t="";l.forEach((function(n){t=t+"-"+u[n].partialField.data[e]})),void 0===d[t]?(d[t]=y,h.push({}),l.forEach((function(t){h[y][t]=u[t].partialField.data[e]})),s.forEach((function(t){h[y][t]=[u[t].partialField.data[e]]})),y+=1):s.forEach((function(n){h[d[t]][n].push(u[n].partialField.data[e])}))}));var m={},b=function(){return e.detachedRoot()};return h.forEach((function(e){var 
t=e;s.forEach((function(n){t[n]=a[n](e[n],b,m)}))})),r?(r.__calculateFieldspace(),v=r):v=new xn(h,p,{name:c}),v}function Se(e,t){var n=H(e.getFieldspace(),t.getFieldspace());return function(e,t){var r=!0;return n.forEach((function(n){r=!(e[n].internalValue!==t[n].internalValue||!r)})),r}}function ke(e,t){var n={},r=[],i=[],a=[],o=e.getFieldspace(),u=t.getFieldspace(),c=o.fieldsObj(),f=u.fieldsObj(),l=o.name+" union "+u.name;if(!A(e._colIdentifier.split(",").sort(),t._colIdentifier.split(",").sort()))return null;function s(e,t){N(e._rowDiffset,(function(e){var r={},o="";i.forEach((function(n){var i=t[n].partialField.data[e];o+="-"+i,r[n]=i})),n[o]||(a.push(r),n[o]=!0)}))}return e._colIdentifier.split(",").forEach((function(e){var t=c[e];r.push(O({},t.schema())),i.push(t.schema().name)})),s(e,c),s(t,f),new xn(a,r,{name:l})}function De(e,t,n){return G(e,t,n,!1,V.LEFTOUTER)}function Te(e,t,n){return G(t,e,n,!1,V.RIGHTOUTER)}var Fe=function(){function e(e,t){for(var n=0;nr&&(r=i))})),[n,r]},Ke=function(){function e(e,t){for(var n=0;n9999?"+"+bt(t,6):bt(t,4))+"-"+bt(e.getUTCMonth()+1,2)+"-"+bt(e.getUTCDate(),2)+(a?"T"+bt(n,2)+":"+bt(r,2)+":"+bt(i,2)+"."+bt(a,3)+"Z":i?"T"+bt(n,2)+":"+bt(r,2)+":"+bt(i,2)+"Z":r||n?"T"+bt(n,2)+":"+bt(r,2)+"Z":"")}var _t=function(e){var t=new RegExp('["'+e+"\n\r]"),n=e.charCodeAt(0);function r(e,t){var r,i=[],a=e.length,o=0,u=0,c=a<=0,f=!1;function l(){if(c)return pt;if(f)return f=!1,st;var t,r,i=o;if(e.charCodeAt(i)===dt){for(;o++=a?c=!0:(r=e.charCodeAt(o++))===ht?f=!0:r===vt&&(f=!0,e.charCodeAt(o)===ht&&++o),e.slice(i+1,t-1).replace(/""/g,'"')}for(;o0&&void 0!==arguments[0]?arguments[0]:[];return t.forEach((function(t){return e.store.set(t.type,t)})),this.store}},{key:"register",value:function(e){return e instanceof lt?(this.store.set(e.type,e),this):null}},{key:"unregister",value:function(e){return this.store.delete(e.type),this}},{key:"get",value:function(e){return this.store.has(e)?this.store.get(e):null}}]),e}(),xt=function(){var e=null;return e||(e=new It)}(),Ut=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")};function Lt(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Vt(e){if(Array.isArray(e)){for(var t=0,n=Array(e.length);t2&&void 0!==arguments[2]?arguments[2]:{},i=arguments[3];t===L.COMPOSE?(e._derivation.length=0,(n=e._derivation).push.apply(n,Vt(i))):e._derivation.push({op:t,meta:r,criteria:i})},Jt=function(e,t){var n;(n=t._ancestorDerivation).push.apply(n,Vt(e._ancestorDerivation).concat(Vt(e._derivation)))},Gt=function(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments[4];Ht(t,n,r,i),Jt(e,t)},zt=(Lt(Ct={},l.NORMAL,{diffIndex:["rowDiffset"],calcDiff:[!0,!1]}),Lt(Ct,l.INVERSE,{diffIndex:["rejectRowDiffset"],calcDiff:[!1,!0]}),Lt(Ct,l.ALL,{diffIndex:["rowDiffset","rejectRowDiffset"],calcDiff:[!0,!0]}),Ct),Kt=function(e,t,n){if(-1!==n&&t===n+1){var r=e.length-1;e[r]=e[r].split("-")[0]+"-"+t}else e.push(""+t)},Wt=function(e,t,n){var r=[],i=[],a=Ut(zt[n].calcDiff,2),o=a[0],u=a[1];return N(e,(function(e){var 
n=t(e);n&&o&&Kt(r,e,-1),!n&&u&&Kt(i,e,-1)})),{rowDiffset:r.join(","),rejectRowDiffset:i.join(",")}},Xt=function(e,t,n,r,i){var a={},o={},u={};return N(e,(function(e){if(t(e)){var n="",c={keys:{}};r.forEach((function(t){var r=i[t].partialField.data[e];n=n+"-"+r,c.keys[t]=r})),void 0===o[n]&&(o[n]=[],a[n]=-1,u[n]=c),Kt(o[n],e,a[n]),a[n]=e}})),{splitRowDiffset:o,dimensionMap:u}},qt=function(e,t,n,r,i){var a={},o=function(){return r.detachedRoot()},u=n.mode,c=e._rowDiffset,f=e._partialFieldspace._cachedValueObjects;return i(c,(function(e){return t(f[e],e,o,a)}),u)},Zt=function(e){var t=e.clone(!1),n=e.getPartialFieldspace();return t._colIdentifier=n.fields.map((function(e){return e.name()})).join(","),n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,t.__calculateFieldspace().calculateFieldsConfig(),t},$t=function(e,t,n,r){for(var i=n(e,t,0,r),a=1,o=e.length;a=t[0]&&e<=t[1]}))},tn=(Lt(Mt={},c.CONTINUOUS,en),Lt(Mt,u.TEMPORAL,en),Mt),nn=function(e,t,n){return tn[n](e,t)},rn=function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=[],i=n.operation||B,a=n.filterByDim,o=void 0===a||a,u=n.filterByMeasure,c=void 0!==u&&u,l=n.clone,s=void 0===l||l,p=s?Zt(e):e,d=p.getFieldsConfig();r=t.length?t.map((function(e){return function(e){var t=e.criteria,n=void 0===t?{}:t,r=n.identifiers,i=void 0===r?[[],[]]:r,a=n.range,u=Ut(i,2),l=u[0],s=void 0===l?[]:l,p=u[1],h=void 0===p?[]:p,v=s.reduce((function(e,t,n){return e[t]=n,e}),{}),y=(s=s.filter((function(e){return e in d&&d[e].def.type===f.DIMENSION||e===U}))).length,m={};if(y)for(var b=function(e,t){var n=i[e],r=""+s.map((function(e){var t=v[e];return n[t]}));m[r]=1},g=1,_=i.length;g<_;g++)b(g);var w=Object.keys(a||{}).filter((function(e){return e in d})),O=h.length||w.length;return c||(w=w.filter((function(e){return d[e].def.type!==f.MEASURE}))),o||(w=w.filter((function(e){return d[e].def.type!==f.DIMENSION}))),O?function(e,t){var n=!0;return o&&(n=!y||m[$t(s,e,Qt,t)]),w.every((function(t){var n=e[t].internalValue;return nn(n,a[t],d[t].def.subtype)}))&&n}:function(){return!1}}(e)})):[function(){return!1}];return i===B?p.select((function(e,t){return r.every((function(n){return n(e,t)}))}),{saveChild:!1}):p.select((function(e,t){return r.some((function(n){return n(e,t)}))}),{saveChild:!1})},an=function(e,t,n,r,i){e._rowDiffset=t,e.__calculateFieldspace().calculateFieldsConfig(),Gt(n,e,L.SELECT,{config:r},i)},on=function(e,t,n,r){var i=e.clone(n.saveChild),a=t;return n.mode===l.INVERSE&&(a=r.filter((function(e){return-1===t.indexOf(e)}))),i._colIdentifier=a.join(","),i.__calculateFieldspace().calculateFieldsConfig(),Gt(e,i,L.PROJECT,{projField:t,config:n,actualProjField:a},null),i},un=function(e,t,n,r){return t.map((function(t){return on(e,t,n,r)}))},cn=function(e){if((e=O({},e)).type||(e.type=f.DIMENSION),!e.subtype)switch(e.type){case f.MEASURE:e.subtype=c.CONTINUOUS;break;default:case f.DIMENSION:e.subtype=u.CATEGORICAL}return e},fn=function(e){return e.map((function(e){return function(e){var t=e.type,n=e.subtype,r=e.name;if(t!==f.DIMENSION&&t!==f.MEASURE)throw new Error("DataModel doesn't support field type "+t+" used for "+r+" field");if(!ot.has(n))throw new Error("DataModel doesn't support measure field subtype "+n+" used for "+r+" field")}(e=cn(e)),e}))},ln=function(e,t,n,r){n=fn(n),r=Object.assign(Object.assign({},ct),r);var i=xt.get(r.dataFormat);if(!i)throw new Error("No converter function found for "+r.dataFormat+" format");var a=i.convert(t,n,r),u=Ut(a,2),c=u[0],f=u[1];!function(e,t){e.forEach((function(e){var 
n=e.as;if(n){var r=t.indexOf(e.name);t[r]=n,e.name=n,delete e.as}}))}(n,c);var l=ut(f,n,c),s=D.createNamespace(l,r.name);e._partialFieldspace=s,e._rowDiffset=f.length&&f[0].length?"0-"+(f[0].length-1):"";var p=[],d=s.fields,h=d.map((function(e){return e.data()})),v=d.map((function(e){return e.formattedData()}));return N(e._rowDiffset,(function(e){p[e]=function(e,t,n,r){var i={},a=!0,o=!1,u=void 0;try{for(var c,f=e.entries()[Symbol.iterator]();!(a=(c=f.next()).done);a=!0){var l=c.value,s=Ut(l,2),p=s[0],d=s[1];i[d.name()]=new F(t[p][r],n[p][r],d)}}catch(e){o=!0,u=e}finally{try{!a&&f.return&&f.return()}finally{if(o)throw u}}return i}(d,v,h,e)})),s._cachedValueObjects=p,e._colIdentifier=n.map((function(e){return e.name})).join(),e._dataFormat=r.dataFormat===o.AUTO?k(t):r.dataFormat,e},sn=function(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:[];e._parent;)t.push(e),e=e._parent;return t},yn=function(e,t,n,r){var i=void 0,a=n.propagationNameSpace,o=n.propagateToSource,u=n.sourceId,c=function(e){var t=e.config,n=e.model,r=t.criteria,i=void 0;return null!==r&&r.fields.some((function(e){return e.type===f.MEASURE}))&&(i=dn(n)),Object.assign({},t,{groupedModel:i})},l=[];if(null===e)l=[{criteria:[]}],i=[];else{var s,p=Object.values(a.mutableActions);!1!==o&&(p=p.filter((function(e){return e.config.sourceId!==u})));var d=p.filter((function(e){return(r.filterFn||function(){return!0})(e,r)})),h=[];if(!1!==o){var v=Object.values(a.mutableActions);v.forEach((function(e){var t=e.config;!1===t.applyOnSource&&t.action===r.action&&t.sourceId!==u&&(h.push(e.model),(i=v.filter((function(t){return t!==e})).map(c)).length&&l.push({criteria:i,models:e.model,path:vn(e.model)}))}))}i=(s=[]).concat.apply(s,[].concat(Vt(d.map(c)),[{criteria:e,groupedModel:null!==e&&e.fields.some((function(e){return e.type===f.MEASURE}))?dn(n.propagationSource):null}])).filter((function(e){return null!==e})),l.push({criteria:i,excludeModels:[].concat(h,Vt(r.excludeModels||[]))})}var y=t.model,m=Object.assign({sourceIdentifiers:e,propagationSourceId:u},r);l.forEach((function(e){var t=e.criteria,n=rn(y,t,{filterByMeasure:!!t.find((function(e){return e.groupedModel===y}))}),r=e.path;if(r){var i=function(e,t){for(var n=0,r=t.length;n2&&void 0!==arguments[2]?arguments[2]:{},i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},a=i.excludeModels||[],o=i.criteria,u=!a.length||-1===a.indexOf(t);u&&t.handlePropagation(n,r);var c=t._children;c.forEach((function(t){var a=o.filter((function(e){return e.groupedModel===t})),u=pn(n,t);a.length&&(u=rn(u,a,{filterByDim:!1,filterByMeasure:!0,clone:!1})),e(t,u,r,i)}))}(y,n,m,{excludeModels:e.excludeModels,criteria:t})}))},mn=function(e,t,n){var r=e.immutableActions;for(var i in r){var a=r[i],o=a.config,u=n.config.sourceId,c=!n.propConfig.filterImmutableAction||n.propConfig.filterImmutableAction(o,n.config);if(o.sourceId!==u&&c){var f=o.criteria;yn(f,{model:t,groupByModel:dn(a.model)},{propagationNameSpace:e,propagateToSource:!1,sourceId:u,propagationSource:a.model},o)}}},bn=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=void 0,i=t.isMutableAction,a=t.criteria,o=t.action+"-"+t.sourceId;r=i?e.mutableActions:e.immutableActions,null===a?delete r[o]:r[o]={model:n,config:t}},gn=function(e,t,n){var r=e.reduce((function(e,r){return"RegExp"===r.constructor.name?e.push.apply(e,Vt(t.filter((function(e){return-1!==e.search(r)})))):r in n&&e.push(r),e}),[]);return Array.from(new Set(r)).map((function(e){return e.trim()}))},_n=function(e,t){return 
e.numberFormat?e.numberFormat()(t):t},wn=function(){function e(e,t){for(var n=0;n1?(i=e.clone(r.saveChild),an(i,u[c[1]],e,n,t),[o,i]):o}(this,e,t,{saveChild:t.saveChild})}},{key:"isEmpty",value:function(){return!this._rowDiffset.length||!this._colIdentifier.length}},{key:"clone",value:function(){var e=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],t=new this.constructor(this);return e?t.setParent(this):t.setParent(null),t}},{key:"project",value:function(e,t){var n={mode:l.NORMAL,saveChild:!0};t=Object.assign({},n,t);var r=this.getFieldsConfig(),i=Object.keys(r),a=t.mode,o=gn(e,i,r),u=void 0;a===l.ALL?u=[on(this,o,{mode:l.NORMAL,saveChild:t.saveChild},i),on(this,o,{mode:l.INVERSE,saveChild:t.saveChild},i)]:u=on(this,o,t,i);return u}},{key:"getFieldsConfig",value:function(){return this._fieldConfig}},{key:"calculateFieldsConfig",value:function(){return this._fieldConfig=this._fieldspace.fields.reduce((function(e,t,n){return e[t.name()]={index:n,def:t.schema()},e}),{}),this}},{key:"dispose",value:function(){this._parent&&this._parent.removeChild(this),this._parent=null,this._children.forEach((function(e){e._parent=null})),this._children=[]}},{key:"removeChild",value:function(e){var t=this._children.findIndex((function(t){return t===e}));-1===t||this._children.splice(t,1)}},{key:"setParent",value:function(e){this._parent&&this._parent.removeChild(this),this._parent=e,e&&e._children.push(this)}},{key:"getParent",value:function(){return this._parent}},{key:"getChildren",value:function(){return this._children}},{key:"getDerivations",value:function(){return this._derivation}},{key:"getAncestorDerivations",value:function(){return this._ancestorDerivation}}]),e}(),En=function(e,t){if(Array.isArray(e))return e;if(Symbol.iterator in Object(e))return function(e,t){var n=[],r=!0,i=!1,a=void 0;try{for(var o,u=e[Symbol.iterator]();!(r=(o=u.next()).done)&&(n.push(o.value),!t||n.length!==t);r=!0);}catch(e){i=!0,a=e}finally{try{!r&&u.return&&u.return()}finally{if(i)throw a}}return n}(e,t);throw new TypeError("Invalid attempt to destructure non-iterable instance")},jn=function(){function e(e,t){for(var n=0;n1&&void 0!==arguments[1]?arguments[1]:{},n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{saveChild:!0},r=""+e.join(),i=[this,e,t],a=Ae.apply(void 0,i);return Gt(this,a,L.GROUPBY,{fieldsArr:e,groupByString:r,defaultReducer:Ee.defaultReducer()},t),n.saveChild?a.setParent(this):a.setParent(null),a}},{key:"sort",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{saveChild:!1},n=this.getData({order:"row",sort:e}),r=n.schema.map((function(e){return e.name})),i=[r].concat(n.data),a=new this.constructor(i,n.schema,{dataFormat:"DSVArr"});return Gt(this,a,L.SORT,t,e),t.saveChild?a.setParent(this):a.setParent(null),a}},{key:"serialize",value:function(e,t){e=e||this._dataFormat,t=Object.assign({},{fieldSeparator:","},t);var n=this.getFieldspace().fields,r=n.map((function(e){return e.formattedData()})),i=r[0].length,a=void 0,u=void 0,c=void 0;if(e===o.FLAT_JSON)for(a=[],u=0;u=0&&(n.fields[o]=e)}else n.fields.push(e),r.forEach((function(t,n){t[e.name()]=new F(i[n],a[n],e)}));return n._cachedFieldsObj=null,n._cachedDimension=null,n._cachedMeasure=null,this.__calculateFieldspace().calculateFieldsConfig(),this}},{key:"calculateVariable",value:function(e,t,n){var r=this;e=cn(e),n=Object.assign({},{saveChild:!0,replaceVar:!1},n);var i=this.getFieldsConfig(),a=t.slice(0,t.length-1),o=t[t.length-1];if(i[e.name]&&!n.replaceVar)throw new Error(e.name+" field already exists in 
datamodel");var u=a.map((function(e){var t=i[e];if(!t)throw new Error(e+" is not a valid column name.");return t.index})),c=this.clone(n.saveChild),f=c.getFieldspace().fields,l=u.map((function(e){return f[e]})),s={},p=function(){return r.detachedRoot()},d=[];N(c._rowDiffset,(function(e){var t=l.map((function(t){return t.partialField.data[e]}));d[e]=o.apply(void 0,An(t).concat([e,p,s]))}));var h=ut([d],[e],[e.name]),v=En(h,1)[0];return c.addField(v),Gt(this,c,L.CAL_VAR,{config:e,fields:a},o),c}},{key:"propagate",value:function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=arguments[2],r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=t.isMutableAction,a=t.sourceId,o=t.payload,u=hn(this),c=u._propagationNameSpace,f=dn(this),l={groupByModel:f,model:u};return n&&bn(c,t,this),yn(e,l,{propagationNameSpace:c,sourceId:a,propagationSource:this},Object.assign({payload:o},t)),i&&mn(c,u,{config:t,propConfig:r}),this}},{key:"on",value:function(e,t){switch(e){case"propagation":this._onPropagation.push(t)}return this}},{key:"unsubscribe",value:function(e){switch(e){case"propagation":this._onPropagation=[]}return this}},{key:"handlePropagation",value:function(e,t){var n=this;this._onPropagation.forEach((function(r){return r.call(n,e,t)}))}},{key:"bin",value:function(e,t){var n=this.getFieldsConfig();if(!n[e])throw new Error("Field "+e+" doesn't exist");var r=t.name||e+"_binned";if(n[r])throw new Error("Field "+r+" already exists");var i=function(e,t,n){var r=n.buckets,i=n.binsCount,a=n.binSize,o=n.start,u=n.end,c=e.domain(),f=M(c,2),l=f[0],s=f[1];r||(o=0!==o&&(!o||o>l)?l:o,u=0!==u&&(!u||ul&&r.unshift(l),r[r.length-1]<=s&&r.push(s+1);for(var p=[],d=0;d2&&void 0!==arguments[2]?arguments[2]:function(e){return e},r=arguments[3],i=r.saveChild,a=e.getFieldspace().fieldsObj(),o=qt(e.clone(i),n,r,e,(function(){for(var e=arguments.length,n=Array(e),r=0;r0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments[2],r={mode:l.NORMAL,saveChild:!0},i=this.getFieldsConfig(),a=Object.keys(i),o=[[t]];return n=Object.assign({},r,n),(e=e.length?e:[[]]).forEach((function(e,n){o[n]=gn([].concat(An(e),An(t)),a,i)})),un(this,o,n,a)}}],[{key:"configureInvalidAwareTypes",value:function(e){return C.invalidAwareVals(e)}},{key:"Reducers",get:function(){return Ee}},{key:"Converters",get:function(){return xt}},{key:"FieldTypes",get:function(){return ot}}]),t}(On),kn=be.sum,Dn=be.avg,Tn=be.min,Fn=be.max,Nn=be.first,Rn=be.last,Pn=be.count,Cn=be.std,Mn={compose:function(){for(var e=arguments.length,t=Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{saveChild:!0},r=e,i=void 0,a=[];return t.forEach((function(e){r=e(r),a.push.apply(a,Y(r._derivation)),i||(i=r)})),i&&i!==r&&i.dispose(),r._ancestorDerivation=[],Gt(e,r,L.COMPOSE,null,a),n.saveChild?r.setParent(e):r.setParent(null),r}},bin:function(){for(var e=arguments.length,t=Array(e),n=0;n {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. 
This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return 
convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = 
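// [Editor's sketch] The HOUR resolver above prefers the 12-hour token whenever
// an am/pm token accompanies it, adding 12 for pm; a condensed analogue:
function resolveHour (h24, h12, ampm) {
    if (h12 != null && ampm != null) {
        return ampm.toLowerCase() === 'pm' ? h12 + 12 : h12;
    }
    return h12 != null ? h12 : h24;
}
console.log(resolveHour(null, 9, 'PM')); // 21
// Caveat worth verifying against the source: with this logic '12 PM' resolves
// to hour 24, which the JS Date constructor rolls over to the next day.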
Object.keys(definitions);\n const occurrence = [];\n let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 
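// [Editor's sketch] findTokens above walks the format string looking for the
// '%' prefix followed by a known token letter:
function findTokens (format, tokenLetters) {
    const found = [];
    let i = -1;
    while ((i = format.indexOf('%', i + 1)) >= 0) {
        const next = format[i + 1];
        if (tokenLetters.indexOf(next) !== -1) {
            found.push({ index: i, token: next });
        }
    }
    return found;
}
console.log(findTokens('%Y-%m-%d', ['Y', 'm', 'd']));
// [{ index: 0, token: 'Y' }, { index: 3, token: 'm' }, { index: 6, token: 'd' }]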
=== regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === 
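// [Editor's sketch] extractTokenValue above escapes the literal stretches of
// the format, substitutes each %token with its extract() regex, and matches
// the input string against the result. For '%Y-%m-%d' the built regex is
// effectively the one below:
function extractYmd (input) {
    const re = /(\d{4})-(\d+)-(\d+)/; // '(\\d{4})' + '-' + '(\\d+)' + '-' + '(\\d+)'
    const [, Y, m, d] = input.match(re) || [];
    return { Y, m, d };
}
console.log(extractYmd('2019-03-08')); // { Y: '2019', m: '03', d: '08' }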
undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = 
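// [Editor's sketch] extend2/merge above deep-copies obj2 into obj1 while
// remembering every visited source object, so a cyclic reference in the source
// is rewired to the matching target instead of recursing forever:
function deepMerge (tgt, src, seenSrc = [], seenTgt = []) {
    seenSrc.push(src); seenTgt.push(tgt);
    for (const key of Object.keys(src)) {
        const val = src[key];
        if (val !== null && typeof val === 'object') {
            const idx = seenSrc.indexOf(val);
            tgt[key] = idx !== -1
                ? seenTgt[idx] // cycle: reuse the already-built target
                : deepMerge(Array.isArray(val) ? [] : {}, val, seenSrc, seenTgt);
        } else {
            tgt[key] = val;
        }
    }
    return tgt;
}
const src = { x: 1 }; src.self = src;
const copy = deepMerge({}, src);
console.log(copy.self === copy); // true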
this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
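// [Editor's sketch] A rowDiffset encodes the retained row indices as ranges;
// rowDiffsetIterator above expands it and fires the callback per index:
function expandDiffset (rowDiffset) {
    const out = [];
    if (!rowDiffset.length) return out;
    rowDiffset.split(',').forEach((part) => {
        const [s, e] = part.split('-');
        for (let i = +s, end = +(e || s); i <= end; i += 1) out.push(i);
    });
    return out;
}
console.log(expandDiffset('0-2,4,6')); // [0, 1, 2, 4, 6]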
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? 
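// [Editor's sketch] findBucketRange above binary-searches the sorted,
// contiguous [start, end) ranges for the one containing the value:
function findRange (ranges, value) {
    let lo = 0;
    let hi = ranges.length - 1;
    while (lo <= hi) {
        const mid = lo + Math.floor((hi - lo) / 2);
        const r = ranges[mid];
        if (value >= r.start && value < r.end) return r;
        if (value >= r.end) lo = mid + 1; else hi = mid - 1;
    }
    return null;
}
const ranges = [{ start: 0, end: 10 }, { start: 10, end: 20 }, { start: 20, end: 30 }];
console.log(findRange(ranges, 15)); // { start: 10, end: 20 }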
(dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const 
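// [Editor's sketch] generateBuckets above lays boundaries binSize apart from
// start until it passes end; createBinnedFieldData then pads the ends with
// dMin and dMax + 1 so every datum lands inside some [start, end) bucket:
function generateBuckets (binSize, start, end) {
    const buckets = [];
    let next = start;
    while (next < end) { buckets.push(next); next += binSize; }
    buckets.push(next);
    return buckets;
}
console.log(generateBuckets(10, 0, 26)); // [0, 10, 20, 30]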
name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting as it is not sure\n * what the sorting algorithm used by browsers is stable or not.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting functions to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
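// [Editor's sketch] mergeSort above exists because Array.prototype.sort was
// not guaranteed stable across engines at the time; the '<= 0' comparison in
// the merge step is what preserves the input order of equal keys. A compact
// copy-based variant (the library version sorts in place):
function mergeSortStable (arr, cmp) {
    if (arr.length < 2) return arr;
    const mid = arr.length >> 1;
    const left = mergeSortStable(arr.slice(0, mid), cmp);
    const right = mergeSortStable(arr.slice(mid), cmp);
    const out = [];
    let i = 0;
    let j = 0;
    while (i < left.length && j < right.length) {
        out.push(cmp(left[i], right[j]) <= 0 ? left[i++] : right[j++]);
    }
    return out.concat(left.slice(i), right.slice(j));
}
const rows = [{ k: 1, v: 'a' }, { k: 0 }, { k: 1, v: 'b' }];
console.log(mergeSortStable(rows, (a, b) => a.k - b.k));
// k:1/'a' still precedes k:1/'b' after sorting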
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n 
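// [Editor's sketch] groupData above buckets rows by one column while keeping
// first-seen order, using a Map from cell value to group position:
function groupData (rows, fieldIndex) {
    const pos = new Map();
    const grouped = [];
    rows.forEach((row) => {
        const val = row[fieldIndex];
        if (pos.has(val)) {
            grouped[pos.get(val)][1].push(row);
        } else {
            pos.set(val, grouped.length);
            grouped.push([val, [row]]);
        }
    });
    return grouped;
}
console.log(groupData([['a', 1], ['b', 2], ['a', 3]], 0));
// [['a', [['a', 1], ['a', 3]]], ['b', [['b', 2]]]]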
*\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which column\n * to be included eg 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the fields according to the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema to the schema object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: 'uid',\n type: 'identifier'\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting needed then there is the need to expose the index\n // mapping from the old index to its new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the union operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n 
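// [Editor's sketch] With columnWise: true, dataBuilder above transposes the
// row tuples into one array per schema column:
function toColumnWise (rows, columnCount) {
    const cols = Array.from({ length: columnCount }, () => []);
    rows.forEach((tuple) => {
        tuple.forEach((cell, i) => cols[i].push(cell));
    });
    return cols;
}
console.log(toColumnWise([[1, 'a'], [2, 'b']], 2)); // [[1, 2], ['a', 'b']]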
(dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true only tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
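// [Editor's note] Despite the copied 'union' doc comment, the function above
// computes a set difference: dm2 tuples are hashed first with addData set to
// false, then only dm1 tuples with unseen hashes are kept. A sketch using
// JSON.stringify in place of the '-'-joined hash string:
function difference (rows1, rows2) {
    const seen = new Set(rows2.map(r => JSON.stringify(r)));
    return rows1.filter((row) => {
        const hash = JSON.stringify(row);
        if (seen.has(hash)) return false;
        seen.add(hash); // also dedupes repeats within rows1, as above
        return true;
    });
}
console.log(difference([{ a: 1 }, { a: 2 }, { a: 2 }], [{ a: 1 }])); // [{ a: 2 }]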
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page level storage which stores, registers, unregisters reducers for all the datamodel instances. There is only one\n * reducer store available in a page. All the datamodel instances receive same instance of reducer store. DataModel\n * out of the box provides handful of {@link reducer | reducers} which can be used as reducer funciton.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which does not have `defAggFn` mentioned in schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from store by doing\n * name lookup. 
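// [Editor's sketch] Note that sum/min/max above filter invalid cells out, while
// avg still divides by the unfiltered length; std is the square root of the
// mean squared deviation (population form):
const avg = arr => arr.reduce((acc, x) => acc + x, 0) / (arr.length || 1);
const std = (arr) => {
    const mean = avg(arr);
    return Math.sqrt(avg(arr.map(x => (x - mean) ** 2)));
};
console.log(std([2, 4, 4, 4, 5, 5, 7, 9])); // 2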
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns instance of the singleton store in page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducers.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i++];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * accleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. If the given name already exists in store it is overridden by new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitize the user given field and return a common Array structure field\n * list\n * @param {DataModel} dataModel the dataModel operating on\n * @param {Array} fieldArr user input of field Array\n * @return {Array} arrays of field name\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitize the reducer provide by the user and create a common type of object.\n * user can give function Also\n * @param {DataModel} dataModel dataModel to worked on\n * @param {Object|function} [reducers={}] reducer provided by the users\n * @return {Object} object containing reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = 
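// [Editor's note] The meanSquared example in the doc block above increments i
// twice per pass (once in the for-header, once in squaredVal[i++]), so it sums
// only every other square. A corrected, runnable version:
const meanSquared = (arr) => {
    let sum = 0;
    for (let i = 0; i < arr.length; i++) {
        sum += arr[i] * arr[i];
    }
    return sum;
};
console.log(meanSquared([1, 2, 3])); // 14
// reducerStore.register('meanSquared', meanSquared) would register it for use
// in groupBy; the function that register returns unregisters it again.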
dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * main function which perform the group-by operations which reduce the measures value is the\n * fields are common according to the reducer function provided\n * @param {DataModel} dataModel the dataModel to worked\n * @param {Array} fieldArr fields according to which the groupby should be worked\n * @param {Object|Function} reducers reducers function\n * @param {DataModel} existingDataModel Existing datamodel instance\n * @return {DataModel} new dataModel with the group by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = 
dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match otherwise return a clone of the dm1\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\n\n/**\n * In {@link DataModel}, every tabular data consists of column, a column is stored as field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have 
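// [Editor's sketch] naturalJoinFilter above retains a tuple pair only when
// every common column matches on internalValue; fullOuterJoin is then just the
// union of the left and right outer joins. The predicate in isolation:
const commonCols = ['id'];
const naturalPredicate = (leftFields, rightFields) =>
    commonCols.every(col => leftFields[col].internalValue === rightFields[col].internalValue);
console.log(naturalPredicate(
    { id: { internalValue: 3 } },
    { id: { internalValue: 3 } }
)); // true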
multiple attributes which describes its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which a data is categorized and the measure is the numerical values that\n * quantify the data set.\n * In short a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the the field schema.\n *\n * @public\n * @return {string} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\n/**\n * Represents categorical field 
subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be\n // occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n\n return domain;\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is 
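// Worked example for minimumConsecutiveDifference above, using hypothetical
// already-parsed millisecond values [1000, 1000, 3000, 6000]:
// equal neighbours are skipped, the remaining consecutive gaps are 2000 and
// 3000, so the method returns and caches 2000. If every value is identical
// (or invalid), processedCount stays 0 and null is returned instead.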
missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) || datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n}\n\n","import Dimension from '../dimension';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
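// Sketch of the Binned field above, assuming a hypothetical schema with
// bins: [0, 10, 20, 30]:
// binnedField.bins()                -> [0, 10, 20, 30]
// binnedField.calculateDataDomain() -> [0, 30] (first and last bin edges)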
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport InvalidAwareTypes from '../../invalid-aware-types';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {Object} schema - The schema object for the corresponding field.\n */\n constructor (schema) {\n super();\n this.schema = schema;\n this._dtf = new DateTimeFormatter(this.schema.format);\n }\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate 
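// Continuous.calculateDataDomain above skips InvalidAwareTypes entries, so
// for hypothetical parsed data [5, NA, 2, 9] (NA being an InvalidAwareTypes
// instance) the computed domain is [2, 9].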
= this._dtf.getNativeDate(val);\n result = nativeDate ? nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","/**\n * Stores the full data and the metadata of a field. 
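// Illustrative behaviour of the four parsers above (values hypothetical,
// assuming none of them is a registered invalid value):
new CategoricalParser().parse('  USA ');            // 'USA' (stringified, trimmed)
new ContinuousParser().parse('12.5');               // 12.5
new ContinuousParser().parse('abc');                // InvalidAwareTypes.NA
new BinnedParser().parse(' 10 - 20.5 ');            // '10-20.5'
new TemporalParser({ format: '%Y' }).parse('2019'); // millisecond timestamp for 2019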
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum));\n }\n}\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport {\n Categorical,\n Temporal,\n Binned,\n Continuous,\n CategoricalParser,\n TemporalParser,\n BinnedParser,\n ContinuousParser,\n PartialField\n} from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n let partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new ContinuousParser());\n return new Continuous(partialField, `0-${data.length - 1}`);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.TEMPORAL:\n partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema));\n return new Temporal(partialField, `0-${data.length - 1}`);\n case DimensionSubtype.BINNED:\n partialField = new PartialField(schema.name, data, schema, new BinnedParser());\n return new Binned(partialField, `0-${data.length - 1}`);\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n default:\n partialField = new PartialField(schema.name, data, schema, new CategoricalParser());\n return new Categorical(partialField, `0-${data.length - 1}`);\n }\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n switch (schema.type) {\n case FieldType.MEASURE:\n switch (schema.subtype) {\n case MeasureSubtype.CONTINUOUS:\n return new Continuous(partialField, rowDiffset);\n default:\n return new Continuous(partialField, rowDiffset);\n }\n case FieldType.DIMENSION:\n switch (schema.subtype) {\n case DimensionSubtype.CATEGORICAL:\n 
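// Minimal sketch tying PartialField to a Field view (all values hypothetical):
const pf = new PartialField(
    'sales',
    ['1', '2', 'x'],
    { name: 'sales', type: 'measure', subtype: 'continuous' },
    new ContinuousParser()
);
// pf.data is sanitized on construction: [1, 2, InvalidAwareTypes.NA]
const salesField = new Continuous(pf, '0-1'); // a view over rows 0..1 only
salesField.data(); // -> [1, 2]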
return new Categorical(partialField, rowDiffset);\n case DimensionSubtype.TEMPORAL:\n return new Temporal(partialField, rowDiffset);\n case DimensionSubtype.BINNED:\n return new Binned(partialField, rowDiffset);\n default:\n return new Categorical(partialField, rowDiffset);\n }\n default:\n return new Categorical(partialField, rowDiffset);\n }\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"]\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? 
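// Usage sketch of DSVArr above (input data hypothetical):
const [headers, columns] = DSVArr(
    [['a', 'b'], [1, 2], [3, 4]],
    [{ name: 'a' }, { name: 'b' }],
    { firstRowHeader: true }
);
// headers -> ['a', 'b']; columns are column-major: [[1, 3], [2, 4]]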
new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? 
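// Quote handling in the vendored d3-dsv parser above (input hypothetical):
dsv(',').parseRows('a,"b,""c"""\n1,2');
// -> [['a', 'b,"c"'], ['1', '2']] (doubled quotes unescape, and the quoted
//    field keeps its embedded delimiter)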
\"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows\n };\n}\n","import dsv from \"./dsv\";\n\nvar csv = dsv(\",\");\n\nexport var csvParse = csv.parse;\nexport var csvParseRows = csv.parseRows;\nexport var csvFormat = csv.format;\nexport var csvFormatBody = csv.formatBody;\nexport var csvFormatRows = csv.formatRows;\n","import dsv from \"./dsv\";\n\nvar tsv = dsv(\"\\t\");\n\nexport var tsvParse = tsv.parse;\nexport var tsvParseRows = tsv.parseRows;\nexport var tsvFormat = tsv.format;\nexport var tsvFormatBody = tsv.formatBody;\nexport var tsvFormatRows = tsv.formatRows;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import { columnMajor } from '../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const 
converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport * as converter from './converter';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n 
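// Worked example for generateRowDiffset above: with rowDiffset ['0-2'] and
// lastInsertedValue 2, inserting i = 3 rewrites the last entry to '0-3';
// inserting i = 5 instead pushes a new '5' entry, giving ['0-2', '5'].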
rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const fields = clonedDm.getPartialFieldspace().fields;\n const formattedFieldsData = fields.map(field => field.formattedData());\n const rawFieldsData = fields.map(field => field.data());\n\n const selectorHelperFn = index => selectFn(\n prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index),\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm = model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn) => {\n let key = fn(arr, data, 0);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i)}`;\n }\n return key;\n};\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const filterByMeasure = config.filterByMeasure || false;\n const clonedModel = cloneWithAllFields(model);\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => ((dataModel) => {\n let keyFn;\n const dataObj = dataModel.getData();\n const fieldsConfig = dataModel.getFieldsConfig();\n const dimensions = Object.keys(dataModel.getFieldspace().getDimension())\n .filter(d => d in modelFieldsConfig);\n const dLen = dimensions.length;\n const indices = dimensions.map(d =>\n fieldsConfig[d].index);\n const measures = Object.keys(dataModel.getFieldspace().getMeasure())\n .filter(d => d in modelFieldsConfig);\n const fieldsSpace = dataModel.getFieldspace().fieldsObj();\n const data = dataObj.data;\n const domain = measures.reduce((acc, v) => {\n acc[v] = fieldsSpace[v].domain();\n return acc;\n }, {});\n const valuesMap = {};\n\n keyFn = (arr, row, idx) => row[arr[idx]];\n if (dLen) {\n data.forEach((row) => {\n const key = getKey(indices, row, keyFn);\n valuesMap[key] = 1;\n });\n }\n\n keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue;\n return data.length ? 
(fields) => {\n const present = dLen ? valuesMap[getKey(dimensions, fields, keyFn)] : true;\n\n if (filterByMeasure) {\n return measures.every(field => fields[field].internalValue >= domain[field][0] &&\n fields[field].internalValue <= domain[field][1]) && present;\n }\n return present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport 
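// cloneWithSelect above with mode FilteringMode.ALL: selectModeMap lists both
// 'rowDiffset' and 'rejectRowDiffset', so a single pass over the rows yields
// [selectedClone, rejectedClone]; NORMAL and INVERSE each return one clone.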
const splitWithProject = (sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS];\n const supportedDimSubTypes = [\n DimensionSubtype.CATEGORICAL,\n DimensionSubtype.BINNED,\n DimensionSubtype.TEMPORAL,\n DimensionSubtype.GEO\n ];\n const { type, subtype, name } = unitSchema;\n\n switch (type) {\n case FieldType.DIMENSION:\n if (supportedDimSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`);\n }\n break;\n case FieldType.MEASURE:\n if (supportedMeasureSubTypes.indexOf(subtype) === -1) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n break;\n default:\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converterFn = converter[options.dataFormat];\n\n if (!(converterFn && typeof converterFn === 'function')) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converterFn(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? 
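// Defaulting behaviour of sanitizeUnitSchema above (field names illustrative):
sanitizeUnitSchema({ name: 'city' });
// -> { name: 'city', type: 'dimension', subtype: 'categorical' }
sanitizeUnitSchema({ name: 'sales', type: 'measure' });
// -> { name: 'sales', type: 'measure', subtype: 'continuous' }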
`0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const nonTraversingModel = propModelInf.nonTraversingModel;\n const excludeModels = propModelInf.excludeModels || [];\n\n if (dataModel === nonTraversingModel) {\n return;\n }\n\n const propagate = excludeModels.length ? 
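// Replay sketch for getDerivationArguments above: a persisted derivation like
// { op: DM_DERIVATIVES.GROUPBY, meta: { groupByString: 'city,year' }, criteria }
// comes back as { operation: 'groupBy', params: [['city', 'year'], criteria] },
// which applyExistingOperationOnModel then invokes as
// model.groupBy(['city', 'year'], criteria, { saveChild: false }).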
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const selectionModel = applyExistingOperationOnModel(propModel, child);\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n let propModel;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const propagateInterpolatedValues = config.propagateInterpolatedValues;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n let criterias = [];\n\n if (identifiers === null && config.persistent !== true) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n const rootGroupByModel = rootModels.groupByModel;\n if (propagateInterpolatedValues && rootGroupByModel) {\n propModel = filterPropagationModel(rootGroupByModel, criteria, {\n filterByMeasure: propagateInterpolatedValues\n });\n propagateIdentifiers(rootGroupByModel, propModel, propConfig);\n }\n\n criterias.forEach((inf) => {\n const propagationModel = filterPropagationModel(rootModel, inf.criteria);\n const path = inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n nonTraversingModel: propagateInterpolatedValues && rootGroupByModel\n });\n }\n });\n};\n\nexport const propagateImmutableActions = 
(propagationNameSpace, rootModels, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, rootModels, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} [options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = 
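// Sketch of getNormalizedProFields above: regular expressions fan out over
// all field names while plain strings must exist in fieldConfig (names and
// the fieldConfig object are hypothetical):
getNormalizedProFields(
    [/^Miles/, 'Origin'],
    ['Miles_per_Gallon', 'Origin', 'Year'],
    { Miles_per_Gallon: {}, Origin: {}, Year: {} }
);
// -> ['Miles_per_Gallon', 'Origin']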
source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns a auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two set and create one set where each value of one set is paired with each value of another\n * set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resulatant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM)));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj.[originDM.getName()].Origin === obj.[carsDM.getName()].Origin));\n *\n * @text\n * This is chained version of `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows are performed\n * internally by resolving common fields are from both table and the rows with common value are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both of the {@link DataModel} instances should have same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only include rows which are present in the datamodel on which\n * it was called but not on the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is chained version of `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which control which all rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
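// Bracket-notation form of the join predicate sketched in the docs above
// (property access with a computed name needs obj[expr], not obj.[expr]):
carsDM.join(originDM, obj =>
    obj[originDM.getName()].Origin === obj[carsDM.getName()].Origin);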
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt));\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Retrieves a boolean value if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with child parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is filter column (field) operation. It expects list of fields' name and either include those\n * or exclude those based on {@link FilteringMode} on the resultant variable.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. 
All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with selection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is chained version of `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links instance has in\n * the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { 
Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return 
this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data happened from root {@link DataModel} to current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing opearators.\n * DataModel extends {@link Relation} class which defines all the relational algebra opreators. DataModel gives\n * definition of generic data processing operators which are not relational algebra complient.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof Datamodel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds suitable adapter to serialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. | string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Defination of the variables. 
Order of the variables in data and order of the\n * variables in schema has to be same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] specify field separator type if the data is of type dsv string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduces an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) needs a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val;\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retieved in row order or column order. Possible values\n * are `'rows'` and `'columns'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the name of the variable needs to be formatted. The formatter function is called for each row passing the\n * value of the cell for a particular row as arguments. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Fomatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel support multi level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by `Origin` field in `DESC` order in first level followed by another\n * level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"]\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by `Origin` field according to the average value of all `Acceleration` for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; },\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high' }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of newly defined variable.\n * @param {Array.} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) {\n // @todo dont throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} payload - The interaction specific details.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n 
groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModels, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * This method is used to invoke the method associated with propagation.\n *\n * @param {Object} payload The interaction payload.\n * @param {DataModel} identifiers The propagated DataModel.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array marks the boundary\n * of buckets in [inclusive, exclusive) range format. This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size,\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5}\n * const binDM = dataModel.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {string} [config.buckets] - An array containing the bucket ranges.\n * @param {string} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {string} [config.binsCount] - The total number of bins to generate. 
It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. 
A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt));\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti));\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.select(['Origin'], fields => fields.Origin.value === \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the DataModel for the Origin : 'USA'\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode(inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByColumn( [['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt));\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // without predicate function:\n * const splitDtInv = dt.splitByColumn( [['Acceleration'], ['Horsepower'],['Origin', 'Cylinders'],\n * {mode: 'inverse'})\n * console.log(splitDtInv));\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each datamModel instance\n * @param {Array} commonFields - Set of common fields included in all datamModel instances\n * @param {Object} config - The configuration object\n * @param {string} [config.saveChild] - Configuration to save child or not\n * @param {string}[config.mode=FilteringMode.NORMAL] -The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], 
commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. For selection opearation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for functional version of `select`. 
Its only avialable on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate funciton which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation.It expects list of fields name and either include those or exclude those based on {@link FilteringMode} on\n * the resultant variable.It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects array of fields name based on which it creates the selection and rejection set. All the field\n * whose name is present in array goes in selection set and rest of the fields goes in rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resulatant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for functional version of `select`. Its only avialable on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n\n/**\n * This is functional version of binnig operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. 
Refer binning {@link example_of_binning | example} to have a intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const buckets = {\n * start: 30\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm vairable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binnedDm = dataModel.bin('horsepower', config);\n * const binnedDm = binFn(Dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of measure which will be used to create bin\n * @param {Object} config Config required for bin creation\n * @param {Array.} config.bucketObj.stops Defination of bucket ranges. Two subsequent number from arrays\n * are picked and a range is created. The first number from range is inclusive and the second number from range\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Force the start of the bin from a particular number.\n * If not mentioned, the start of the bin or the lower domain of the data if stops is not mentioned, else its\n * the first value of the stop.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator.Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and perform aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' } );\n * groupedDM = groupByFn(dm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If its\n * not passed, or any variable is ommitted from the object, default aggregation function is used from the\n * schema of the variable.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n\n/**\n * Enables composing operators to run multiple operations and save group of operataion as named opration on a DataModel.\n * The resulting DataModel will be the result of all the operation provided. The operations provided will be executed in\n * a serial manner ie. result of one operation will be the input for the next operations (like pipe operator in unix).\n *\n * Suported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.} operators: An array of operation that will be applied on the\n * datatable.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on calculateVariable() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on sort() method of DataModel to behave\n * the pure-function functionality.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of calling function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file 
+{"version":3,"sources":["webpack://DataModel/webpack/universalModuleDefinition","webpack://DataModel/webpack/bootstrap","webpack://DataModel/./src/index.js","webpack://DataModel/./src/enums/data-format.js","webpack://DataModel/./src/enums/dimension-subtype.js","webpack://DataModel/./src/enums/measure-subtype.js","webpack://DataModel/./src/enums/field-type.js","webpack://DataModel/./src/enums/filtering-mode.js","webpack://DataModel/./src/enums/group-by-functions.js","webpack://DataModel/./src/utils/date-time-formatter.js","webpack://DataModel/./src/utils/column-major.js","webpack://DataModel/./src/utils/extend2.js","webpack://DataModel/./src/utils/helper.js","webpack://DataModel/./src/field-store.js","webpack://DataModel/./src/value.js","webpack://DataModel/./src/operator/row-diffset-iterator.js","webpack://DataModel/./src/invalid-aware-types.js","webpack://DataModel/./src/operator/bucket-creator.js","webpack://DataModel/./src/constants/index.js","webpack://DataModel/./src/operator/get-common-schema.js","webpack://DataModel/./src/operator/cross-product.js","webpack://DataModel/./src/operator/merge-sort.js","webpack://DataModel/./src/operator/sort.js","webpack://DataModel/./src/operator/data-builder.js","webpack://DataModel/./src/operator/difference.js","webpack://DataModel/./src/operator/group-by-function.js","webpack://DataModel/./src/utils/reducer-store.js","webpack://DataModel/./src/operator/group-by.js","webpack://DataModel/./src/operator/natural-join-filter-function.js","webpack://DataModel/./src/operator/union.js","webpack://DataModel/./src/operator/outer-join.js","webpack://DataModel/./src/fields/partial-field/index.js","webpack://DataModel/./src/fields/field/index.js","webpack://DataModel/./src/fields/dimension/index.js","webpack://DataModel/./src/fields/measure/index.js","webpack://DataModel/./src/fields/parsers/field-parser/index.js","webpack://DataModel/./src/fields/parsers/categorical-parser/index.js","webpack://DataModel/./src/fields/categorical/index.js","webpack://DataModel/./src/fields/parsers/temporal-parser/index.js","webpack://DataModel/./src/fields/helper.js","webpack://DataModel/./src/fields/temporal/index.js","webpack://DataModel/./src/fields/parsers/binned-parser/index.js","webpack://DataModel/./src/fields/binned/index.js","webpack://DataModel/./src/fields/parsers/continuous-parser/index.js","webpack://DataModel/./src/fields/continuous/index.js","webpack://DataModel/./src/fields/field-registry.js","webpack://DataModel/./src/field-creator.js","webpack://DataModel/./src/default-config.js","webpack://DataModel/./src/converter/model/dataConverter.js","webpack://DataModel/./node_modules/d3-dsv/src/dsv.js","webpack://DataModel/./src/converter/utils/dsv-arr.js","webpack://DataModel/./src/converter/utils/dsv-str.js","webpack://DataModel/./src/converter/defaultConverters/dsvStringConverter.js","webpack://DataModel/./src/converter/utils/flat-json.js","webpack://DataModel/./src/converter/defaultConverters/jsonConverter.js","webpack://DataModel/./src/converter/defaultConverters/dsvArrayConverter.js","webpack://DataModel/./src/converter/utils/auto-resolver.js","webpack://DataModel/./src/converter/defaultConverters/autoConverter.js","webpack://DataModel/./src/converter/dataConverterStore.js","webpack://DataModel/./src/helper.js","webpack://DataModel/./src/relation.js","webpack://DataModel/./src/datamodel.js","webpack://DataModel/./src/stats/index.js","webpack://DataModel/./src/export.js","webpack://DataModel/./src/operator/compose.js","webpack://DataModel/./src/operator/pure-operat
ors.js","webpack://DataModel/./src/operator/natural-join.js"],"names":["root","factory","exports","module","define","amd","window","installedModules","__webpack_require__","moduleId","i","l","modules","call","m","c","d","name","getter","o","Object","defineProperty","enumerable","get","r","Symbol","toStringTag","value","t","mode","__esModule","ns","create","key","bind","n","object","property","prototype","hasOwnProperty","p","s","DataModel","require","default","DataFormat","FLAT_JSON","DSV_STR","DSV_ARR","AUTO","DimensionSubtype","CATEGORICAL","TEMPORAL","BINNED","MeasureSubtype","CONTINUOUS","FieldType","MEASURE","DIMENSION","FilteringMode","NORMAL","INVERSE","ALL","GROUP_BY_FUNCTIONS","SUM","AVG","MIN","MAX","FIRST","LAST","COUNT","STD","convertToNativeDate","date","Date","pad","DateTimeFormatter","format","this","dtParams","undefined","nativeDate","RegExp","escape","text","replace","TOKEN_PREFIX","DATETIME_PARAM_SEQUENCE","YEAR","MONTH","DAY","HOUR","MINUTE","SECOND","MILLISECOND","defaultNumberParser","defVal","val","parsedVal","isFinite","parseInt","defaultRangeParser","range","nVal","toLowerCase","length","getTokenDefinitions","daysDef","short","long","monthsDef","H","index","extract","parser","formatter","getHours","toString","hours","P","M","getMinutes","S","getSeconds","K","getMilliseconds","a","join","day","getDay","A","e","getDate","b","month","getMonth","B","y","result","substring","presentDate","presentYear","Math","trunc","getFullYear","year","Y","getTokenFormalNames","definitions","HOUR_12","AMPM_UPPERCASE","AMPM_LOWERCASE","SHORT_DAY","LONG_DAY","DAY_OF_MONTH","DAY_OF_MONTH_CONSTANT_WIDTH","SHORT_MONTH","LONG_MONTH","MONTH_OF_YEAR","SHORT_YEAR","LONG_YEAR","tokenResolver","defaultResolver","arg","targetParam","hourFormat24","hourFormat12","ampmLower","ampmUpper","amOrpm","isPM","findTokens","tokenPrefix","tokenLiterals","keys","occurrence","forwardChar","indexOf","push","token","formatAs","nDate","formattedStr","String","formattedVal","parse","dateTimeStamp","options","extractTokenValue","dtParamSeq","noBreak","dtParamArr","args","resolverKey","resolverParams","resolverFn","param","resolvedVal","splice","apply","checkIfOnlyYear","unshift","tokenObj","lastOccurrenceIndex","occObj","occIndex","targetText","regexFormat","tokenArr","map","obj","occurrenceLength","extractValues","match","shift","getNativeDate","Number","len","store","fields","forEach","fieldIndex","Array","from","OBJECTSTRING","objectToStrFn","objectToStr","arrayToStr","checkCyclicRef","parentArr","bIndex","extend2","obj1","obj2","skipUndef","merge","tgtArr","srcArr","item","srcVal","tgtVal","str","cRef","isArray","getUniqueId","getTime","round","random","isArrEqual","arr1","arr2","formatNumber","detectDataFormat","data","isObject","fieldStore","createNamespace","fieldArr","dataId","fieldsObj","_cachedFieldsObj","field","getMeasure","measureFields","_cachedMeasure","schema","type","getDimension","dimensionFields","_cachedDimension","Value","rawValue","formattedValue","getNumberFormattedVal","defineProperties","_value","configurable","writable","_formattedValue","_internalValue","rowDiffsetIterator","rowDiffset","callback","split","diffStr","diffStsArr","start","end","InvalidAwareTypes","config","assign","_invalidAwareValsMap","invalidAwareVals","NULL","NA","NIL","invalid","nil","null","generateBuckets","binSize","buckets","next","findBucketRange","bucketRanges","leftIdx","rightIdx","midIdx","floor","ROW_ID","DM_DERIVATIVES","SELECT","PROJECT","GROUPBY","COMPOSE","CAL_VAR","BIN","SORT","JOINS","CROSS","LEFTOUTER","
RIGHTOUTER","NATURAL","FULLOUTER","LOGICAL_OPERATORS","getCommonSchema","fs1","fs2","retArr","fs1Arr","defaultFilterFn","crossProduct","dm1","dm2","filterFn","replaceCommonSchema","jointype","applicableFilterFn","dm1FieldStore","getFieldspace","dm2FieldStore","dm1FieldStoreName","dm2FieldStoreName","commonSchemaList","Error","tmpSchema","_rowDiffset","rowAdded","rowPosition","ii","tuple","userArg","partialField","formattedData","dm1Fields","prepareJoinData","dm2Fields","detachedRoot","tupleObj","cellVal","iii","defSortFn","a1","b1","mergeSort","arr","sortFn","sort","lo","hi","mid","mainArr","auxArr","resolveStrSortOrder","fDetails","strSortOrder","sortOrder","dataType","sortType","retFunc","getSortFn","groupData","hashMap","Map","groupedData","datum","fieldVal","has","set","createSortingFnArg","groupedDatum","targetFields","targetFieldDetails","label","reduce","acc","idx","applyStandardSort","sortingDetails","fieldName","sortMeta","fieldInSchema","sortingFn","slice","f","makeGroupMapAndSort","depColumns","targetCol","currRow","fVal","nMap","sortData","dataObj","filter","sDetial","groupSortingIdx","findIndex","standardSortingDetails","groupSortingDetails","detail","sortedGroupMap","row","nextMap","applyGroupSort","uids","pop","dataBuilder","colIdentifier","addUid","columnWise","retObj","reqSorting","tmpDataArr","colName","insertInd","tmpData","difference","hashTable","schemaNameArr","dm1FieldStoreFieldObj","dm2FieldStoreFieldObj","_colIdentifier","prepareDataHelper","dm","addData","hashData","schemaName","getFilteredValues","sum","filteredNumber","curr","avg","totalSum","isNaN","fnList","filteredValues","min","max","sqrt","mean","num","variance","defaultReducerName","ReducerStore","defReducer","entries","reducer","__unregister","delete","Function","reducerStore","groupBy","dataModel","reducers","existingDataModel","sFieldArr","dimensions","getFieldArr","reducerObj","measures","defaultReducer","measureName","defAggFn","reducerFn","resolve","getReducerObj","fieldStoreObj","dbName","dimensionArr","measureArr","newDataModel","rowCount","hash","_","cachedStore","cloneProvider","__calculateFieldspace","naturalJoinFilter","commonSchemaArr","retainTuple","internalValue","union","leftOuterJoin","dataModel1","dataModel2","rightOuterJoin","PartialField","_sanitize","Field","subtype","description","displayName","_params","_context","build","Dimension","_cachedDomain","calculateDataDomain","Measure","unit","numberFormat","FieldParser","CategoricalParser","isInvalid","getInvalidType","trim","Categorical","Set","domain","add","TemporalParser","_dtf","calculateContinuousDomain","POSITIVE_INFINITY","NEGATIVE_INFINITY","Temporal","_cachedMinDiff","sortedData","arrLn","minDiff","prevDatum","nextDatum","processedCount","dataFormat","parsedDatum","BinnedParser","matched","parseFloat","Binned","binsArr","bins","ContinuousParser","Continuous","FieldTypeRegistry","_fieldType","dimension","registerDefaultFields","registerFieldType","fieldRegistry","createFields","dataColumn","headers","headersObj","header","BUILDER","createUnitField","DataConverter","_type","EOL","EOF","QUOTE","NEWLINE","RETURN","objectConverter","columns","JSON","stringify","inferColumns","rows","columnSet","column","width","formatDate","getUTCHours","minutes","getUTCMinutes","seconds","getUTCSeconds","milliseconds","getUTCMilliseconds","getUTCFullYear","getUTCMonth","getUTCDate","delimiter","reFormat","DELIMITER","charCodeAt","parseRows","N","I","eof","eol","j","preformatBody","formatValue","formatRow","test","convert","customConverter","concat","
formatBody","formatRows","DSVArr","schemaFields","unitSchema","firstRowHeader","columnMajor","headerMap","h","schemaField","headIndex","DSVStr","fieldSeparator","dsv","d3Dsv","DSVStringConverter","FlatJSON","insertionIndex","schemaFieldsName","JSONConverter","DSVArrayConverter","Auto","converters","AutoDataConverter","DataConverterStore","_getDefaultConverters","converter","converterStore","resp","updateFields","partialFieldspace","fieldStoreName","collID","partialFieldMap","newFields","coll","createUnitFieldFromPartial","persistCurrentDerivation","model","operation","criteriaFn","_derivation","op","meta","criteria","persistAncestorDerivation","sourceDm","newDm","_ancestorDerivation","persistDerivations","selectModeMap","diffIndex","calcDiff","generateRowDiffset","lastInsertedValue","li","selectRowDiffsetIterator","checker","newRowDiffSet","rejRowDiffSet","shouldSelect","shouldReject","checkerResult","rejectRowDiffset","rowSplitDiffsetIterator","splitRowDiffset","dimensionMap","dimensionSet","selectHelper","clonedDm","selectFn","iterator","cachedValueObjects","_partialFieldspace","_cachedValueObjects","cloneWithAllFields","clone","getPartialFieldspace","calculateFieldsConfig","getKey","fn","rowId","keyFn","domainChecker","some","dom","boundsChecker","isWithinDomain","fieldType","filterPropagationModel","propModels","fns","filterByDim","filterByMeasure","clonedModel","modelFieldsConfig","getFieldsConfig","propModel","identifiers","fieldNames","values","indices","dLen","def","valuesMap","rangeKeys","hasData","present","every","select","saveChild","addDiffsetToClonedDm","selectConfig","cloneWithProject","projField","allFields","cloned","projectionSet","actualProjField","splitWithProject","projFieldSet","projFields","sanitizeUnitSchema","sanitizeAndValidateSchema","validateUnitSchema","updateData","relation","defaultConfig","dataHeader","fieldNameAs","as","resolveFieldName","nameSpace","valueObjects","rawFieldsData","formattedFieldsData","rawData","prepareSelectionData","_dataFormat","applyExistingOperationOnModel","derivations","getDerivations","selectionModel","derivation","params","groupByString","getDerivationArguments","getRootGroupByModel","_parent","find","getRootDataModel","getPathToRootModel","path","propagateToAllDataModels","rootModels","propagationInf","propagationNameSpace","propagateToSource","propagationSourceId","sourceId","addGroupedModel","conf","crit","groupedModel","criterias","actionCriterias","mutableActions","filteredCriteria","entry","excludeModels","sourceActionCriterias","actionInf","actionConf","applyOnSource","action","models","propagationSource","rootModel","propConfig","sourceIdentifiers","inf","propagationModel","filteredModel","getFilteredModel","reverse","handlePropagation","propagateIdentifiers","propModelInf","propagate","children","_children","child","matchingCriteria","propagateImmutableActions","immutableActions","filterImmutableAction","criteriaModel","groupByModel","addToPropNamespace","sourceNamespace","isMutableAction","getNormalizedProFields","fieldConfig","normalizedProjField","constructor","search","Relation","source","_fieldStoreName","_propagationNameSpace","_fieldspace","joinWith","unionWith","differenceWith","defConfig","cloneConfig","extraCloneDm","setOfRowDiffsets","cloneWithSelect","setParent","_fieldConfig","fieldObj","removeChild","sibling","parent","_onPropagation","order","withUid","getAllFields","dataGenerated","fmtFieldIdx","elem","fIdx","fmtFn","datumIdx","ids","fill","fieldsArr","getData","dataInCSVArr","sortedDm","colData","rowsCount"
,"serializedData","rowIdx","colIdx","fieldinst","dependency","replaceVar","fieldsConfig","depVars","retrieveFn","depFieldIndices","fieldSpec","fs","suppliedFields","computedValues","fieldsData","addField","addToNameSpace","payload","rootGroupByModel","eventName","measureFieldName","binFieldName","measureField","binsCount","dMin","dMax","ceil","abs","binnedData","createBinnedFieldData","binField","serialize","getSchema","clonedDMs","splitWithSelect","uniqueFields","commonFields","normalizedProjFieldSets","fieldSet","first","last","count","sd","std","Operators","compose","operations","currentDM","firstChild","dispose","bin","project","calculateVariable","naturalJoin","fullOuterJoin","version","Stats","FieldsUtility","enums"],"mappings":"CAAA,SAA2CA,EAAMC,GAC1B,iBAAZC,SAA0C,iBAAXC,OACxCA,OAAOD,QAAUD,IACQ,mBAAXG,QAAyBA,OAAOC,IAC9CD,OAAO,YAAa,GAAIH,GACE,iBAAZC,QACdA,QAAmB,UAAID,IAEvBD,EAAgB,UAAIC,IARtB,CASGK,QAAQ,WACX,O,YCTE,IAAIC,EAAmB,GAGvB,SAASC,EAAoBC,GAG5B,GAAGF,EAAiBE,GACnB,OAAOF,EAAiBE,GAAUP,QAGnC,IAAIC,EAASI,EAAiBE,GAAY,CACzCC,EAAGD,EACHE,GAAG,EACHT,QAAS,IAUV,OANAU,EAAQH,GAAUI,KAAKV,EAAOD,QAASC,EAAQA,EAAOD,QAASM,GAG/DL,EAAOQ,GAAI,EAGJR,EAAOD,QA0Df,OArDAM,EAAoBM,EAAIF,EAGxBJ,EAAoBO,EAAIR,EAGxBC,EAAoBQ,EAAI,SAASd,EAASe,EAAMC,GAC3CV,EAAoBW,EAAEjB,EAASe,IAClCG,OAAOC,eAAenB,EAASe,EAAM,CAAEK,YAAY,EAAMC,IAAKL,KAKhEV,EAAoBgB,EAAI,SAAStB,GACX,oBAAXuB,QAA0BA,OAAOC,aAC1CN,OAAOC,eAAenB,EAASuB,OAAOC,YAAa,CAAEC,MAAO,WAE7DP,OAAOC,eAAenB,EAAS,aAAc,CAAEyB,OAAO,KAQvDnB,EAAoBoB,EAAI,SAASD,EAAOE,GAEvC,GADU,EAAPA,IAAUF,EAAQnB,EAAoBmB,IAC/B,EAAPE,EAAU,OAAOF,EACpB,GAAW,EAAPE,GAA8B,iBAAVF,GAAsBA,GAASA,EAAMG,WAAY,OAAOH,EAChF,IAAII,EAAKX,OAAOY,OAAO,MAGvB,GAFAxB,EAAoBgB,EAAEO,GACtBX,OAAOC,eAAeU,EAAI,UAAW,CAAET,YAAY,EAAMK,MAAOA,IACtD,EAAPE,GAA4B,iBAATF,EAAmB,IAAI,IAAIM,KAAON,EAAOnB,EAAoBQ,EAAEe,EAAIE,EAAK,SAASA,GAAO,OAAON,EAAMM,IAAQC,KAAK,KAAMD,IAC9I,OAAOF,GAIRvB,EAAoB2B,EAAI,SAAShC,GAChC,IAAIe,EAASf,GAAUA,EAAO2B,WAC7B,WAAwB,OAAO3B,EAAgB,SAC/C,WAA8B,OAAOA,GAEtC,OADAK,EAAoBQ,EAAEE,EAAQ,IAAKA,GAC5BA,GAIRV,EAAoBW,EAAI,SAASiB,EAAQC,GAAY,OAAOjB,OAAOkB,UAAUC,eAAe1B,KAAKuB,EAAQC,IAGzG7B,EAAoBgC,EAAI,GAIjBhC,EAAoBA,EAAoBiC,EAAI,G,grEClFrD,IAAMC,EAAYC,EAAQ,GAE1BxC,EAAOD,QAAUwC,EAAUE,QAAUF,EAAUE,QAAUF,G,k3BCKzD,IAOeG,EAPI,CACfC,UAAW,WACXC,QAAS,SACTC,QAAS,SACTC,KAAM,QCCKC,EANU,CACrBC,YAAa,cACbC,SAAU,WACVC,OAAQ,UCCGC,EAJQ,CACnBC,WAAY,cCKDC,EALG,CACdC,QAAS,UACTC,UAAW,aCGAC,EANO,CAClBC,OAAQ,SACRC,QAAS,UACTC,IAAK,OCQMC,EAXY,CACvBC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,IAAK,MACLC,MAAO,QACPC,KAAM,OACNC,MAAO,QACPC,IAAK,OCRT,SAASC,EAAqBC,GAC1B,OAAIA,aAAgBC,KACTD,EAGJ,IAAIC,KAAKD,GASpB,SAASE,EAAKxC,GACV,OAAQA,EAAI,GAAL,IAAgBA,EAAOA,EA8BP,SAASyC,EAAmBC,GACnDC,KAAKD,OAASA,EACdC,KAAKC,cAAWC,EAChBF,KAAKG,gBAAaD,EAftBE,OAAOC,OAAS,SAAUC,GACtB,OAAOA,EAAKC,QAAQ,2BAA4B,SAkBpDT,EAAkBU,aAAe,IAIjCV,EAAkBW,wBAA0B,CACxCC,KAAM,EACNC,MAAO,EACPC,IAAK,EACLC,KAAM,EACNC,OAAQ,EACRC,OAAQ,EACRC,YAAa,GAUjBlB,EAAkBmB,oBAAsB,SAAUC,GAC9C,OAAO,SAAUC,GACb,IAAIC,EACJ,OAAIC,SAASD,EAAYE,SAASH,EAAK,KAC5BC,EAGJF,IAYfpB,EAAkByB,mBAAqB,SAAUC,EAAON,GACpD,OAAO,SAACC,GACJ,IACItF,EADAD,SAGJ,IAAKuF,EAAO,OAAOD,EAEnB,IAAMO,EAAON,EAAIO,cAEjB,IAAK9F,EAAI,EAAGC,EAAI2F,EAAMG,OAAQ/F,EAAIC,EAAGD,IACjC,GAAI4F,EAAM5F,GAAG8F,gBAAkBD,EAC3B,OAAO7F,EAIf,YAAUsE,IAANtE,EACOsF,EAEJ,OAqBfpB,EAAkB8B,oBAAsB,WACpC,IAAMC,EAAU,CACZC,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,SACA,SACA,UACA,YACA,WACA,SACA,aAGFC,EAAY,CACdF,MAAO,CACH,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,MACA,OAEJC,KAAM,CACF,UACA,WACA,QACA,QACA,MACA,OACA,OACA,SACA,YACA,UACA,WACA,aAsPR,MAlPoB,CAChBE,EAAG,CAEC9F,KAA
M,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAGP,OAFUzB,EAAoByB,GAErBmB,WAAWC,aAG5B1G,EAAG,CAECM,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GACP,IACMqB,EADI9C,EAAoByB,GACdmB,WAAa,GAE7B,OAAkB,IAAVE,EAAc,GAAKA,GAAOD,aAG1C7E,EAAG,CAECvB,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCG,EAAG,CAECtG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,WACpBC,OAAQ,SAACjB,GACL,OAAIA,EACOA,EAAIO,cAER,MAEXW,UAAW,SAAClB,GAIR,OAHUzB,EAAoByB,GACdmB,WAEA,GAAK,KAAO,OAGpCI,EAAG,CAECvG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACfwB,gBAKvBC,EAAG,CAECzG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACZ0B,gBAK1BC,EAAG,CAEC3G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACjB4B,kBAEHR,aAGlBS,EAAG,CAEC7G,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQC,MAAMmB,KAAK,KAA9B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQC,OACrDO,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQC,MAAMoB,GAAMX,aAGpCa,EAAG,CAECjH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWN,EAAQE,KAAKkB,KAAK,KAA7B,KACbb,OAAQtC,EAAkByB,mBAAmBM,EAAQE,MACrDM,UAND,SAMYlB,GACP,IACM+B,EADIxD,EAAoByB,GAChBgC,SAEd,OAAQtB,EAAQE,KAAKmB,GAAMX,aAGnCc,EAAG,CAEClH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GAChBmC,UAEHf,aAGnBrG,EAAG,CAECC,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GAChBmC,aAKtBC,EAAG,CAECpH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUF,MAAMmB,KAAK,KAAhC,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUF,OACvDO,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUF,MAAM0B,GAAQjB,aAGxCmB,EAAG,CAECvH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,UAAWH,EAAUD,KAAKkB,KAAK,KAA/B,KACbb,OAAQtC,EAAkByB,mBAAmBS,EAAUD,MACvDM,UAND,SAMYlB,GACP,IACMqC,EADI9D,EAAoByB,GACdsC,WAEhB,OAAQzB,EAAUD,KAAKyB,GAAQjB,aAGvCvG,EAAG,CAECG,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,UACpBC,OALD,SAKSjB,GAAO,OAAOrB,EAAkBmB,qBAAlBnB,CAAwCqB,GAAO,GACrEkB,UAND,SAMYlB,GAIP,OAAOtB,EAHGH,EAAoByB,GACdsC,WAEG,KAG3BE,EAAG,CAECxH,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OALD,SAKSjB,GACJ,IAAIyC,SACJ,GAAIzC,EAAK,CACL,IAAMtF,EAAIsF,EAAIQ,OACdR,EAAMA,EAAI0C,UAAUhI,EAAI,EAAGA,GAE/B,IAAIuF,EAAYtB,EAAkBmB,qBAAlBnB,CAAwCqB,GACpD2C,EAAc,IAAIlE,KAClBmE,EAAcC,KAAKC,MAAOH,EAAYI,cAAiB,KAO3D,OAHIxE,EAFJkE,KAAYG,EAAc3C,GAEM8C,cAAgBJ,EAAYI,gBACxDN,MAAYG,EAAc,GAAI3C,GAE3B1B,EAAoBkE,GAAQM,eAEvC7B,UAtBD,SAsBYlB,GACP,IACIgD,EADMzE,EAAoByB,GACjB+C,cAAc3B,WACvB1G,SAOJ,OALIsI,IACAtI,EAAIsI,EAAKxC,OACTwC,EAAOA,EAAKN,UAAUhI,EAAI,EAAGA,IAG1BsI,IAGfC,EAAG,CAECjI,KAAM,IACN+F,MAAO,EACPC,QAJD,WAIc,MAAO,YACpBC,OAAQtC,EAAkBmB,sBAC1BoB,UAND,SAMYlB,GAIP,OAHUzB,EAAoByB,GACf+C,cAAc3B,eAgB7CzC,EAAkBuE,oBAAsB,WACpC,IAAMC,EAAcxE,EAAkB8B,sBAEtC,MAAO,CACHf,KAAMyD,EAAYrC,EAClBsC,QAASD,EAAYzI,EACrB2I,eAAgBF,EAAY5G,EAC5B+G,eAAgBH,EAAY7B,EAC5B3B,OAAQwD,EAAY5B,EACpB3B,OAAQuD,EAAY1B,EACpB8B,UAAWJ,EAAYtB,EACvB2B,SAAUL,EAAYlB,EACtBwB,aAAcN,EAAYjB,EAC1BwB,4BAA6BP,EAAYpI,EACzC4I,YAAaR,EAAYf,EACzBwB,WAAYT,EAAYZ,EACxBsB,cAAeV,EAAYtI,EAC3BiJ,WAAYX,EAAYX,EACxBuB,UAAWZ,EAAYF,IAW/BtE,EAAkBqF,cAAgB,WAC9B,IAAMb,EAAcxE,EAAkB8B,sBAChCwD,EAAkB,WAMpB,IALA,IAAIxJ,EAAI,EACJyJ,SACAC,SACEzJ,EAAI,UAAK8F,OAER/F,EAAIC,EAAGD,IACVyJ,oBAAWzJ,OAAX,YAAWA,IACX,kBAASA,OAAT,YAASA,MACL0J,EAAcD,GAItB,OAAKC,EAEEA,EAAY,GAAGlD,OAAOkD,EAAY,IAFd,MA
K/B,MAAO,CACH5E,KAAM,CAAC4D,EAAYX,EAAGW,EAAYF,EAC9BgB,GAEJzE,MAAO,CAAC2D,EAAYf,EAAGe,EAAYZ,EAAGY,EAAYtI,EAC9CoJ,GAEJxE,IAAK,CAAC0D,EAAYtB,EAAGsB,EAAYlB,EAAGkB,EAAYjB,EAAGiB,EAAYpI,EAC3DkJ,GAEJvE,KAAM,CAACyD,EAAYrC,EAAGqC,EAAYzI,EAAGyI,EAAY5G,EAAG4G,EAAY7B,EAC5D,SAAU8C,EAAcC,EAAcC,EAAWC,GAC7C,IAAIJ,SACAK,SACAC,SACAzE,SAcJ,OAZIqE,IAAiBG,EAAUF,GAAaC,IACJ,OAAhCC,EAAO,GAAGvD,OAAOuD,EAAO,MACxBC,GAAO,GAGXN,EAAcE,GAEdF,EADOE,GAGOD,EAGbD,GAELnE,EAAMmE,EAAY,GAAGlD,OAAOkD,EAAY,IACpCM,IACAzE,GAAO,IAEJA,GANoB,OASnCL,OAAQ,CAACwD,EAAY5B,EACjB0C,GAEJrE,OAAQ,CAACuD,EAAY1B,EACjBwC,KAUZtF,EAAkB+F,WAAa,SAAU9F,GAQrC,IAPA,IAAM+F,EAAchG,EAAkBU,aAChC8D,EAAcxE,EAAkB8B,sBAChCmE,EAAgBzJ,OAAO0J,KAAK1B,GAC5B2B,EAAa,GACfrK,SACAsK,UAEItK,EAAImE,EAAOoG,QAAQL,EAAalK,EAAI,KAAO,GAC/CsK,EAAcnG,EAAOnE,EAAI,IACmB,IAAxCmK,EAAcI,QAAQD,IAE1BD,EAAWG,KAAK,CACZlE,MAAOtG,EACPyK,MAAOH,IAIf,OAAOD,GASXnG,EAAkBwG,SAAW,SAAU3G,EAAMI,GACzC,IAQIlE,EARE0K,EAAQ7G,EAAoBC,GAC5BsG,EAAanG,EAAkB+F,WAAW9F,GAC1CuE,EAAcxE,EAAkB8B,sBAClC4E,EAAeC,OAAO1G,GACpB+F,EAAchG,EAAkBU,aAClC6F,SACAK,SACA9K,SAGJ,IAAKA,EAAI,EAAGC,EAAIoK,EAAWtE,OAAQ/F,EAAIC,EAAGD,IAEtC8K,EAAepC,EADf+B,EAAQJ,EAAWrK,GAAGyK,OACYhE,UAAUkE,GAC5CC,EAAeA,EAAajG,QAAQ,IAAIH,OAAO0F,EAAcO,EAAO,KAAMK,GAG9E,OAAOF,GAQX1G,EAAkBtC,UAAUmJ,MAAQ,SAAUC,EAAeC,GACzD,IAAM1B,EAAgBrF,EAAkBqF,gBAClClF,EAAWD,KAAK8G,kBAAkBF,GAClCG,EAAajH,EAAkBW,wBAC/BuG,EAAUH,GAAWA,EAAQG,QAC7BC,EAAa,GACbC,EAAO,GACTC,SACAC,SACAC,SACAlG,SACAvF,SACA0L,SACAC,SACA1L,SACA+H,EAAS,GAEb,IAAKuD,KAAehC,EAChB,GAAK,GAAG1H,eAAe1B,KAAKoJ,EAAegC,GAA3C,CAMA,IAJAD,EAAKvF,OAAS,EAEd0F,GADAD,EAAiBjC,EAAcgC,IACHK,OAAOJ,EAAezF,OAAS,EAAG,GAAG,GAE5D/F,EAAI,EAAGC,EAAIuL,EAAezF,OAAQ/F,EAAIC,EAAGD,SAI9BsE,KAFZiB,EAAMlB,GADNqH,EAAQF,EAAexL,IACFO,OAGjB+K,EAAKd,KAAK,MAEVc,EAAKd,KAAK,CAACkB,EAAOnG,IAM1B,GAAI,OAFJoG,EAAcF,EAAWI,MAAMzH,KAAMkH,MAEuBF,EACxD,MAGJC,EAAWF,EAAWI,IAAgBI,EAU1C,OAPIN,EAAWtF,QAAU3B,KAAK0H,gBAAgBT,EAAWtF,QAErDiC,EAAO+D,QAAQV,EAAW,GAAI,EAAG,GAEjCrD,EAAO+D,QAAP,MAAA/D,EAAkBqD,GAGfrD,GAQX9D,EAAkBtC,UAAUsJ,kBAAoB,SAAUF,GACtD,IAYI/K,EAZEkE,EAASC,KAAKD,OACduE,EAAcxE,EAAkB8B,sBAChCkE,EAAchG,EAAkBU,aAChCyF,EAAanG,EAAkB+F,WAAW9F,GAC1C6H,EAAW,GAEbC,SACAC,SACAC,SACAC,SACAC,SAGArM,SAEJqM,EAAcxB,OAAO1G,GAErB,IAAMmI,EAAWjC,EAAWkC,KAAI,SAAAC,GAAA,OAAOA,EAAI/B,SACrCgC,EAAmBpC,EAAWtE,OACpC,IAAK/F,EAAIyM,EAAmB,EAAGzM,GAAK,EAAGA,KACnCmM,EAAW9B,EAAWrK,GAAGsG,OAEV,IAAM+F,EAAYtG,OAAS,QAKdzB,IAAxB2H,IACAA,EAAsBI,EAAYtG,QAGtCqG,EAAaC,EAAYpE,UAAUkE,EAAW,EAAGF,GACjDI,EAAcA,EAAYpE,UAAU,EAAGkE,EAAW,GAC9C3H,OAAOC,OAAO2H,GACdC,EAAYpE,UAAUgE,EAAqBI,EAAYtG,QAE3DkG,EAAsBE,GAblBF,EAAsBE,EAgB9B,IAAKnM,EAAI,EAAGA,EAAIyM,EAAkBzM,IAC9BkM,EAAS7B,EAAWrK,GACpBqM,EAAcA,EAAY1H,QAAQuF,EAAcgC,EAAOzB,MAAO/B,EAAYwD,EAAOzB,OAAOlE,WAG5F,IAAMmG,EAAgB1B,EAAc2B,MAAM,IAAInI,OAAO6H,KAAiB,GAGtE,IAFAK,EAAcE,QAET5M,EAAI,EAAGC,EAAIqM,EAASvG,OAAQ/F,EAAIC,EAAGD,IACpCgM,EAASM,EAAStM,IAAM0M,EAAc1M,GAE1C,OAAOgM,GAQX9H,EAAkBtC,UAAUiL,cAAgB,SAAU7B,GAClD,IAAIjH,EAAO,KACX,GAAI+I,OAAOrH,SAASuF,GAChBjH,EAAO,IAAIC,KAAKgH,QACb,IAAK5G,KAAKD,QAAUH,KAAK+G,MAAMC,GAClCjH,EAAO,IAAIC,KAAKgH,OAEf,CACD,IAAM3G,EAAWD,KAAKC,SAAWD,KAAK2G,MAAMC,GACxC3G,EAAS0B,SACT3B,KAAKG,WAAL,kCAAsBP,KAAtB,c,sHAAA,CAA8BK,MAC9BN,EAAOK,KAAKG,YAGpB,OAAOR,GAGXG,EAAkBtC,UAAUkK,gBAAkB,SAASiB,GACnD,OAAe,IAARA,GAAa3I,KAAKD,OAAOwI,MAAM,QAAQ5G,QASlD7B,EAAkBtC,UAAU8I,SAAW,SAAUvG,EAAQ6G,GACrD,IAAIzG,SAQJ,OANIyG,EACAzG,EAAaH,KAAKG,WAAaH,KAAKyI,cAAc7B,IACzCzG,EAAaH,KAAKG,cAC3BA,EAAaH,KAAKyI,cAAc7B,IAG7B9G,EAAkBwG,SAASnG,EAAYJ,ICruBnC,eAAC6I,GACZ,IAAIhN,EAAI,EACR,OAAO,WAAe,2BAAXiN,EAAW,qBAAXA,EAAW,gBAClBA,EAAOC,SAAQ,SAAC3H,EAAK4H,GACXH,EAAMG,aAAuBC,QAC/
BJ,EAAMG,GAAcC,MAAMC,KAAK,CAAEtH,OAAQ/F,KAE7CgN,EAAMG,GAAY3C,KAAKjF,MAE3BvF,M,4MCdFsN,EAAe,SACfC,EAAgB7M,OAAOkB,UAAU+E,SACjC6G,EAAc,kBACdC,EAAa,iBAEnB,SAASC,EAAelB,EAAKmB,GAIzB,IAHA,IAAI3N,EAAI2N,EAAU5H,OACd6H,GAAU,EAEP5N,GAAG,CACN,GAAIwM,IAAQmB,EAAU3N,GAElB,OADA4N,EAAS5N,EAGbA,GAAK,EAGT,OAAO4N,EA2GX,SAASC,EAASC,EAAMC,EAAMC,GAE1B,YAAI,IAAOF,EAAP,cAAOA,MAASR,SAAgB,IAAOS,EAAP,cAAOA,MAAST,EACzC,WAGP,IAAOS,EAAP,cAAOA,MAAST,GAAyB,OAATS,EACzBD,SAGP,IAAOA,EAAP,cAAOA,MAASR,IAChBQ,EAAOC,aAAgBX,MAAQ,GAAK,IAnH5C,SAASa,EAAMH,EAAMC,EAAMC,EAAWE,EAAQC,GAC1C,IAAIC,EACAC,EACAC,EACAC,EACAC,EAcJ,GATKL,GAKDD,EAAO1D,KAAKsD,GACZK,EAAO3D,KAAKuD,KALZG,EAAS,CAACJ,GACVK,EAAS,CAACJ,IAOVA,aAAgBX,MAChB,IAAKgB,EAAO,EAAGA,EAAOL,EAAKhI,OAAQqI,GAAQ,EAAG,CAC1C,IACIC,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,eAGA,IAAO6G,EAAP,cAAOA,MAAWhB,EACZU,QAAwB1J,IAAXgK,IACfR,EAAKM,GAAQE,IAIF,OAAXD,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQE,aAAkBlB,MAAQ,GAAK,KAG3C,KADdoB,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,SAMrD,IAAKC,KAAQL,EAAM,CACf,IACIM,EAASP,EAAKM,GACdE,EAASP,EAAKK,GAElB,MAAO3G,GACH,SAGJ,GAAe,OAAX6G,SAAmB,IAAOA,EAAP,cAAOA,MAAWhB,GAKrCiB,EAAMhB,EAAcpN,KAAKmO,MACbd,GACO,OAAXa,SAAmB,IAAOA,EAAP,cAAOA,MAAWf,IACrCe,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAGxCI,IAAQd,GACE,OAAXY,GAAqBA,aAAkBjB,QACvCiB,EAASP,EAAKM,GAAQ,KAGZ,KADdI,EAAOd,EAAeY,EAAQH,IAE1BE,EAASP,EAAKM,GAAQF,EAAOM,GAG7BP,EAAMI,EAAQC,EAAQN,EAAWE,EAAQC,IAI7CL,EAAKM,GAAQE,MAGhB,CACD,GAAIN,QAAwB1J,IAAXgK,EACb,SAEJR,EAAKM,GAAQE,GAIzB,OAAOR,EAiBPG,CAAMH,EAAMC,EAAMC,GACXF,GCnIJ,SAASW,EAASlJ,GACrB,OAAO6H,MAAMqB,QAAQlJ,GA2ClB,IAAMmJ,EAAc,wBAAY,IAAI1K,MAAO2K,UAAYvG,KAAKwG,MAAsB,IAAhBxG,KAAKyG,WASvE,SAASC,EAAWC,EAAMC,GAC7B,IAAKP,EAAQM,KAAUN,EAAQO,GAC3B,OAAOD,IAASC,EAGpB,GAAID,EAAKhJ,SAAWiJ,EAAKjJ,OACrB,OAAO,EAGX,IAAK,IAAI/F,EAAI,EAAGA,EAAI+O,EAAKhJ,OAAQ/F,IAC7B,GAAI+O,EAAK/O,KAAOgP,EAAKhP,GACjB,OAAO,EAIf,OAAO,EASJ,SAASiP,EAAa1J,GACzB,OAAOA,EASJ,IAAM2J,EAAmB,SAACC,GAC7B,MAnEsB,iBAmETA,EACFhN,EAAWE,QACXoM,EAAQU,IAASV,EAAQU,EAAK,IAC9BhN,EAAWG,QACXmM,EAAQU,KAA0B,IAAhBA,EAAKpJ,QAlF/B,SAAmBR,GACtB,OAAOA,IAAQ7E,OAAO6E,GAiF4B6J,CAASD,EAAK,KACrDhN,EAAWC,UAEf,MChDIiN,EApDI,CACfF,KAAM,GAENG,gBAHe,SAGEC,EAAUhP,GACvB,IAAMiP,EAASjP,GAAQmO,IA4CvB,OA1CAtK,KAAK+K,KAAKK,GAAU,CAChBjP,KAAMiP,EACNvC,OAAQsC,EAERE,UAJgB,WAKZ,IAAIA,EAAYrL,KAAKsL,iBAQrB,OANKD,IACDA,EAAYrL,KAAKsL,iBAAmB,GACpCtL,KAAK6I,OAAOC,SAAQ,SAACyC,GACjBF,EAAUE,EAAMpP,QAAUoP,MAG3BF,GAEXG,WAfgB,WAgBZ,IAAIC,EAAgBzL,KAAK0L,eAUzB,OARKD,IACDA,EAAgBzL,KAAK0L,eAAiB,GACtC1L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUC,UAClC8M,EAAcF,EAAMpP,QAAUoP,OAInCE,GAEXI,aA5BgB,WA6BZ,IAAIC,EAAkB9L,KAAK+L,iBAU3B,OARK/L,KAAK+L,mBACND,EAAkB9L,KAAK+L,iBAAmB,GAC1C/L,KAAK6I,OAAOC,SAAQ,SAACyC,GACbA,EAAMI,SAASC,OAASlN,EAAUE,YAClCkN,EAAgBP,EAAMpP,QAAUoP,OAIrCO,IAGR9L,KAAK+K,KAAKK,K,yPCqCVY,E,WAxEX,WAAanP,EAAOoP,EAAUV,I,4FAAO,SACjC,IAAMW,EAAiBC,GAAsBZ,EAAO1O,GAEpDP,OAAO8P,iBAAiBpM,KAAM,CAC1BqM,OAAQ,CACJ7P,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,SAEJ2P,gBAAiB,CACbhQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOqP,GAEXO,eAAgB,CACZjQ,YAAY,EACZ8P,cAAc,EACdC,UAAU,EACV1P,MAAOoP,KAIfjM,KAAKuL,MAAQA,E,6CAkCb,OAAO9E,OAAOzG,KAAKnD,S,gCAUnB,OAAOmD,KAAKnD,Q,4BAnCZ,OAAOmD,KAAKqM,S,qCAOZ,OAAOrM,KAAKwM,kB,oCAOZ,OAAOxM,KAAKyM,mB,KCxDb,SAASC,EAAoBC,EAAYC,GACxCD,EAAWhL,OAAS,GACDgL,EAAWE,MAAM,KACzB/D,SAAQ,SAACgE,GAChB,IAAMC,EAAaD,EAAQD,MAAM,KAC3BG,GAAUD,EAAW,GACrBE,IAAQF,EAAW,IAAMA,EAAW,IAC1C,GAAIE,GAAOD,EACP,IAAK,IAAIpR,EAAIoR,EAAOpR,GAAKqR,EAAKrR,GAAK
,EAC/BgR,EAAShR,M,6PCVvBsR,E,WAqBF,WAAarQ,I,4FAAO,SAChBmD,KAAKqM,OAASxP,E,wDAdOsQ,GACrB,OAAKA,EAGE7Q,OAAO8Q,OAAOF,EAAkBG,qBAAsBF,GAFlDD,EAAkBG,yB,mCAsB7B,OAAOrN,KAAKqM,S,iCAUZ,OAAO5F,OAAOzG,KAAKqM,W,iCAGNlL,GACb,OAAQA,aAAe+L,KAAwBA,EAAkBI,mBAAmBnM,K,qCAGlEA,GAClB,OAAOA,aAAe+L,EAAoB/L,EAAM+L,EAAkBI,mBAAmBnM,O,KAO7F+L,EAAkBK,KAAO,IAAIL,EAAkB,QAC/CA,EAAkBM,GAAK,IAAIN,EAAkB,MAC7CA,EAAkBO,IAAM,IAAIP,EAAkB,OAO9CA,EAAkBG,qBAAuB,CACrCK,QAASR,EAAkBM,GAC3BG,IAAKT,EAAkBO,IACvBG,KAAMV,EAAkBK,KACxBrN,UAAWgN,EAAkBM,IAGlBN,Q,8YC5ETW,EAAkB,SAACC,EAASd,EAAOC,GAIrC,IAHA,IAAMc,EAAU,GACZC,EAAOhB,EAEJgB,EAAOf,GACVc,EAAQ3H,KAAK4H,GACbA,GAAQF,EAIZ,OAFAC,EAAQ3H,KAAK4H,GAEND,GAGLE,EAAkB,SAACC,EAAcrR,GAOnC,IANA,IAAIsR,EAAU,EACVC,EAAWF,EAAavM,OAAS,EACjC0M,SACA7M,SAGG2M,GAAWC,GAAU,CAIxB,GAAIvR,IAFJ2E,EAAQ0M,EADRG,EAASF,EAAUnK,KAAKsK,OAAOF,EAAWD,GAAW,KAGlCnB,OAASnQ,EAAQ2E,EAAMyL,IACtC,OAAOzL,EACA3E,GAAS2E,EAAMyL,IACtBkB,EAAUE,EAAS,EACZxR,EAAQ2E,EAAMwL,QACrBoB,EAAWC,EAAS,GAI5B,OAAO,MChCJ,IAKME,EAAS,SAKTC,EAAiB,CAC1BC,OAAQ,SACRC,QAAS,UACTC,QAAS,QACTC,QAAS,UACTC,QAAS,qBACTC,IAAK,MACLC,KAAM,QAGGC,EAAQ,CACjBC,MAAO,QACPC,UAAW,YACXC,WAAY,aACZC,QAAS,UACTC,UAAW,aAGFC,EACJ,M,wHCzBF,SAASC,EAAiBC,EAAKC,GAClC,IAAMC,EAAS,GACTC,EAAS,GASf,OARAH,EAAI3G,OAAOC,SAAQ,SAACyC,GAChBoE,EAAOvJ,KAAKmF,EAAMI,SAASxP,SAE/BsT,EAAI5G,OAAOC,SAAQ,SAACyC,IAC6B,IAAzCoE,EAAOxJ,QAAQoF,EAAMI,SAASxP,OAC9BuT,EAAOtJ,KAAKmF,EAAMI,SAASxP,SAG5BuT,ECRX,SAASE,IAAoB,OAAO,EAY7B,SAASC,EAAcC,EAAKC,EAAKC,GAA+D,IAArDC,EAAqD,wDAAxBC,EAAwB,uDAAblB,EAAMC,MACtFtD,EAAS,GACTZ,EAAO,GACPoF,EAAqBH,GAAYJ,EACjCQ,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBE,EAAoBH,EAAcjU,KAClCqU,EAAoBF,EAAcnU,KAClCA,EAAUiU,EAAcjU,KAAxB,IAAgCmU,EAAcnU,KAC9CsU,EAAmBlB,EAAgBa,EAAeE,GAExD,GAAIC,IAAsBC,EACtB,MAAM,IAAIE,MAAM,8CAqFpB,OAlFAN,EAAcvH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMoF,EAAYlH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C8E,EAAiBtK,QAAQwK,EAAUxU,OAAiB8T,IACpDU,EAAUxU,KAAUiU,EAAcjU,KAAlC,IAA0CwU,EAAUxU,MAExDwP,EAAOvF,KAAKuK,MAEhBL,EAAczH,OAAOC,SAAQ,SAACyC,GAC1B,IAAMoF,EAAYlH,EAAQ,GAAI8B,EAAMI,WACc,IAA9C8E,EAAiBtK,QAAQwK,EAAUxU,MAC9B8T,IACDU,EAAUxU,KAAUmU,EAAcnU,KAAlC,IAA0CwU,EAAUxU,KACpDwP,EAAOvF,KAAKuK,IAGhBhF,EAAOvF,KAAKuK,MAKpBjE,EAAmBoD,EAAIc,aAAa,SAAChV,GACjC,IAAIiV,GAAW,EACXC,SACJpE,EAAmBqD,EAAIa,aAAa,SAACG,GACjC,IAAMC,EAAQ,GACRC,EAAU,GAChBA,EAAQV,GAAqB,GAC7BU,EAAQT,GAAqB,GAC7BJ,EAAcvH,OAAOC,SAAQ,SAACyC,GAC1ByF,EAAM5K,KAAKmF,EAAM2F,aAAanG,KAAKnP,IACnCqV,EAAQV,GAAmBhF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM2F,aAAanG,KAAKnP,GAClCsQ,eAAgBX,EAAM4F,gBAAgBvV,OAG9C0U,EAAczH,OAAOC,SAAQ,SAACyC,IAC+B,IAAnDkF,EAAiBtK,QAAQoF,EAAMI,SAASxP,OAAgB8T,GAC1De,EAAM5K,KAAKmF,EAAM2F,aAAanG,KAAKgG,IAEvCE,EAAQT,GAAmBjF,EAAMpP,QAAU,CACvC8P,SAAUV,EAAM2F,aAAanG,KAAKgG,GAClC7E,eAAgBX,EAAM4F,gBAAgBJ,OAI9C,IAIMK,EAAYC,GAAgBJ,EAAQV,IACpCe,EAAYD,GAAgBJ,EAAQT,IAC1C,GAAIL,EAAmBiB,EAAWE,GALb,kBAAMxB,EAAIyB,kBACV,kBAAMxB,EAAIwB,iBAFb,IAMyE,CACvF,IAAMC,EAAW,GACjBR,EAAMlI,SAAQ,SAAC2I,EAASC,GACpBF,EAAS7F,EAAO+F,GAAKvV,MAAQsV,KAE7BZ,GAAY7B,EAAMC,QAAUiB,EAC5BnF,EAAK+F,GAAeU,GAGpBzG,EAAK3E,KAAKoL,GACVX,GAAW,EACXC,EAAclV,QAEf,IAAKsU,IAAalB,EAAME,WAAagB,IAAalB,EAAMG,cAAgB0B,EAAU,CACrF,IAAMW,EAAW,GACb7I,EAAMyH,EAAcvH,OAAOlH,OAAS,EACxCqP,EAAMlI,SAAQ,SAAC2I,EAASC,GAEhBF,EAAS7F,EAAO+F,GAAKvV,MADrBuV,GAAO/I,EACsB8I,EAGA,QAGrCZ,GAAW,EACXC,EAAclV,EACdmP,EAAK3E,KAAKoL,UAKf,IAAI5T,GAAUmN,EAAMY,EAAQ,CAAExP,SCjHzC,SAASwV,EAAW3O,EAAGO,GACnB,IAAMqO,EAAKA,GAAG5O,EACR6O,EAAKA,GAAGtO,EACd,OAAIqO,EAAKC,GACG,EAERD,EAAKC,EACE,EAEJ,EAqEJ,SAASC,EAAWC,GAAyB,IAApBC,EAAoB,uDAAXL,EAIrC,OAHII,EAAIpQ,OAAS,GArBrB,SAASsQ,EAAMF,EAAKG,EAAIC,EAAIH,GACxB,GAAIG,IAAOD,EAAM,OAAOH,EAExB,IAAMK,EAAMF,EA
AKlO,KAAKsK,OAAO6D,EAAKD,GAAM,GAKxC,OAJAD,EAAKF,EAAKG,EAAIE,EAAKJ,GACnBC,EAAKF,EAAKK,EAAM,EAAGD,EAAIH,GAzC3B,SAAgBD,EAAKG,EAAIE,EAAKD,EAAIH,GAG9B,IAFA,IAAMK,EAAUN,EACVO,EAAS,GACN1W,EAAIsW,EAAItW,GAAKuW,EAAIvW,GAAK,EAC3B0W,EAAO1W,GAAKyW,EAAQzW,GAKxB,IAHA,IAAIoH,EAAIkP,EACJ3O,EAAI6O,EAAM,EAELxW,EAAIsW,EAAItW,GAAKuW,EAAIvW,GAAK,EACvBoH,EAAIoP,GACJC,EAAQzW,GAAK0W,EAAO/O,GACpBA,GAAK,GACEA,EAAI4O,GACXE,EAAQzW,GAAK0W,EAAOtP,GACpBA,GAAK,GACEgP,EAAOM,EAAOtP,GAAIsP,EAAO/O,KAAO,GACvC8O,EAAQzW,GAAK0W,EAAOtP,GACpBA,GAAK,IAELqP,EAAQzW,GAAK0W,EAAO/O,GACpBA,GAAK,GAqBbsG,CAAMkI,EAAKG,EAAIE,EAAKD,EAAIH,GAEjBD,EAcHE,CAAKF,EAAK,EAAGA,EAAIpQ,OAAS,EAAGqQ,GAE1BD,E,0gBChCX,SAASQ,EAAqBC,EAAUC,GACpC,IAAMC,EAAmD,SAAvCjM,OAAOgM,GAAc/Q,cAA2B,OAAS,MAC3E,OA9CJ,SAAoBiR,EAAUC,GAC1B,IAAIC,SAEJ,OAAQF,GACR,KAAKnU,EAAeC,WACpB,KAAKL,EAAiBE,SAEduU,EADa,QAAbD,EACU,SAAC5P,EAAGO,GAAJ,OAAUP,EAAIO,GAEd,SAACP,EAAGO,GAAJ,OAAUA,EAAIP,GAE5B,MACJ,QAEQ6P,EADa,QAAbD,EACU,SAAC5P,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,EAAI,GAAK,GAGd,SAACP,EAAGO,GAGV,OAFAP,KAAOA,MACPO,KAAOA,GAEI,EAEJP,EAAIO,GAAK,EAAI,GAKhC,OAAOsP,EAYAC,CAAUN,EAAS5G,KAAM8G,GAUpC,SAASK,EAAWhI,EAAMhC,GACtB,IAAMiK,EAAU,IAAIC,IACdC,EAAc,GAYpB,OAVAnI,EAAKjC,SAAQ,SAACqK,GACV,IAAMC,EAAWD,EAAMpK,GACnBiK,EAAQK,IAAID,GACZF,EAAYF,EAAQvW,IAAI2W,IAAW,GAAGhN,KAAK+M,IAE3CD,EAAY9M,KAAK,CAACgN,EAAU,CAACD,KAC7BH,EAAQM,IAAIF,EAAUF,EAAYvR,OAAS,OAI5CuR,EAYX,SAASK,EAAoBC,EAAcC,EAAcC,GACrD,IAAMrO,EAAM,CACRsO,MAAOH,EAAa,IAQxB,OALAC,EAAaG,QAAO,SAACC,EAAK7F,EAAM8F,GAE5B,OADAD,EAAI7F,GAAQwF,EAAa,GAAGrL,KAAI,SAAAgL,GAAA,OAASA,EAAMO,EAAmBI,GAAK5R,UAChE2R,IACRxO,GAEIA,EAUX,SAAS0O,EAAmBhJ,EAAMY,EAAQqI,GAMtC,IALA,IAAIC,SACAC,SACA1B,SACA5W,EAAIoY,EAAerS,OAAS,EAEzB/F,GAAK,EAAGA,IACXqY,EAAYD,EAAepY,GAAG,GAC9BsY,EAAWF,EAAepY,GAAG,IAC7B4W,EAAW2B,GAAcxI,EAAQsI,MVrFf,mBU4FHC,EAEXpC,EAAU/G,GAAM,SAAC/H,EAAGO,GAAJ,OAAU2Q,EAASlR,EAAEwP,EAAStQ,OAAQqB,EAAEiP,EAAStQ,WAC1DmI,EAAQ6J,GAAW,WAC1B,IAAMhB,EAAcH,EAAUhI,EAAMyH,EAAStQ,OACvCkS,EAAYF,EAASA,EAASvS,OAAS,GACvC8R,EAAeS,EAASG,MAAM,EAAGH,EAASvS,OAAS,GACnD+R,EAAqBD,EAAatL,KAAI,SAAAmM,GAAA,OAAKH,GAAcxI,EAAQ2I,MAEvEpB,EAAYpK,SAAQ,SAAC0K,GACjBA,EAAapN,KAAKmN,EAAmBC,EAAcC,EAAcC,OAGrE5B,EAAUoB,GAAa,SAAClQ,EAAGO,GACvB,IAAMvH,EAAIgH,EAAE,GACN3F,EAAIkG,EAAE,GACZ,OAAO6Q,EAAUpY,EAAGqB,MAIxB0N,EAAKpJ,OAAS,EACduR,EAAYpK,SAAQ,SAACqK,GACjBpI,EAAK3E,KAAL,MAAA2E,EAAA,EAAaoI,EAAM,QAnBG,GAqBvB,WACH,IAAMnB,EAASO,EAAoBC,EAAU0B,GAE7CpC,EAAU/G,GAAM,SAAC/H,EAAGO,GAAJ,OAAUyO,EAAOhP,EAAEwP,EAAStQ,OAAQqB,EAAEiP,EAAStQ,WAH5D,IAiBf,I,GAAMqS,GAAsB,SAAtBA,EAAuBC,EAAYzJ,EAAMY,EAAQqI,GACnD,GAA0B,IAAtBQ,EAAW7S,OAAgB,OAAOoJ,EAEtC,IAAM0J,EAAYD,EAAW,GACvBrM,EAAM,IAAI8K,IAEhBlI,EAAK6I,QAAO,SAACC,EAAKa,GACd,IAAMC,EAAOD,EAAQD,EAAUvS,OAM/B,OALI2R,EAAIR,IAAIsB,GACRd,EAAIpX,IAAIkY,GAAMvO,KAAKsO,GAEnBb,EAAIP,IAAIqB,EAAM,CAACD,IAEZb,IACR1L,GAdmE,2BAgBtE,YAAuBA,EAAvB,+CAA4B,wBAAlBhL,EAAkB,KAAbgE,EAAa,KAClByT,EAAOL,EAAoBC,EAAWH,MAAM,GAAIlT,EAAKwK,EAAQqI,GACnE7L,EAAImL,IAAInW,EAAKyX,GACT5L,MAAMqB,QAAQuK,IACdb,EAAkBa,EAAMjJ,EAAQqI,IApB8B,6EAwBtE,OAAO7L,GA2CJ,SAAS0M,GAAUC,EAASd,GAAgB,IACzCrI,EAAiBmJ,EAAjBnJ,OAAQZ,EAAS+J,EAAT/J,KAGd,GAA8B,KAD9BiJ,EAAiBA,EAAee,QAAO,SAAAC,GAAA,QAAab,GAAcxI,EAAQqJ,EAAQ,QAC/DrT,OAAnB,CAEA,IAAIsT,EAAkBjB,EAAekB,WAAU,SAAAF,GAAA,OAA0B,OAAfA,EAAQ,MAClEC,GAAuC,IAArBA,EAAyBA,EAAkBjB,EAAerS,OAE5E,IAAMwT,EAAyBnB,EAAeK,MAAM,EAAGY,GACjDG,EAAsBpB,EAAeK,MAAMY,GAEjDlB,EAAkBhJ,EAAMY,EAAQwJ,GAChCpK,EA5CJ,SAAyBA,EAAMY,EAAQqI,EAAgBQ,GAQnD,GAA8B,KAP9BR,EAAiBA,EAAee,QAAO,SAACM,GACpC,OAAkB,OAAdA,EAAO,KACPb,EAAWpO,KAAKiP,EAAO,KAChB,OAII1T,OAAgB,OAAOoJ,EAE1CyJ,EAAaA,EAAWrM,KAAI,SAAAlM,GAAA,OAAKkY,GAAcxI
,EAAQ1P,MAEvD,IAAMqZ,EAAiBf,GAAoBC,EAAYzJ,EAAMY,EAAQqI,GACrE,OAAOjJ,EAAK5C,KAAI,SAACoN,GAIb,IAHA,IAAI3Z,EAAI,EACJ4Z,EAAUF,GAENtM,MAAMqB,QAAQmL,IAClBA,EAAUA,EAAQ/Y,IAAI8Y,EAAIf,EAAW5Y,KAAKsG,QAG9C,OAAOsT,EAAQhN,WAuBZiN,CAAe1K,EAAMY,EAAQyJ,EAAqBD,EAAuBhN,KAAI,SAAAkN,GAAA,OAAUA,EAAO,OAErGP,EAAQY,KAAO3K,EAAK5C,KAAI,SAAAoN,GAAA,OAAOA,EAAII,SACnCb,EAAQ/J,KAAOA,GC/OZ,SAAS6K,GAAa3K,EAAY0B,EAAYkJ,EAAe7B,EAAgBnN,GAKhFA,EAAUvK,OAAO8Q,OAAO,GAJL,CACf0I,QAAQ,EACRC,YAAY,GAEwBlP,GAExC,IAAMmP,EAAS,CACXrK,OAAQ,GACRZ,KAAM,GACN2K,KAAM,IAEJI,EAASjP,EAAQiP,OACjBG,EAAajC,GAAkBA,EAAerS,OAAS,EAEvDuU,EAAa,GAiDnB,GA/CgBL,EAAchJ,MAAM,KAE5B/D,SAAQ,SAACqN,GACb,IAAK,IAAIva,EAAI,EAAGA,EAAIqP,EAAWtJ,OAAQ/F,GAAK,EACxC,GAAIqP,EAAWrP,GAAGO,SAAWga,EAAS,CAClCD,EAAW9P,KAAK6E,EAAWrP,IAC3B,UAMZsa,EAAWpN,SAAQ,SAACyC,GAEhByK,EAAOrK,OAAOvF,KAAKmF,EAAMI,aAGzBmK,GACAE,EAAOrK,OAAOvF,KAAK,CACfjK,KAAMoS,EACN3C,KAAMlN,EAAUE,YAIxB8N,EAAmBC,GAAY,SAAC/Q,GAC5Boa,EAAOjL,KAAK3E,KAAK,IACjB,IAAMgQ,EAAYJ,EAAOjL,KAAKpJ,OAAS,EAEvCuU,EAAWpN,SAAQ,SAACyC,EAAOwF,GACvBiF,EAAOjL,KAAKqL,GAAWrF,EAFf,GAE6BxF,EAAM2F,aAAanG,KAAKnP,MAE7Dka,IACAE,EAAOjL,KAAKqL,GAAWF,EAAWvU,QAAU/F,GAGhDoa,EAAON,KAAKtP,KAAKxK,GAIbqa,GAAcD,EAAOjL,KAAKqL,GAAWhQ,KAAKxK,MAI9Cqa,GACApB,GAASmB,EAAQhC,GAGjBnN,EAAQkP,WAAY,CACpB,IAAMM,EAAUrN,mB,sHAAAA,CAASA,MAAMgN,EAAOrK,OAAOhK,UAASwG,KAAI,iBAAM,MAChE6N,EAAOjL,KAAKjC,SAAQ,SAACkI,GACjBA,EAAMlI,SAAQ,SAACiC,EAAMnP,GACjBya,EAAQza,GAAGwK,KAAK2E,SAGxBiL,EAAOjL,KAAOsL,EAGlB,OAAOL,EC5EJ,SAASM,GAAYxG,EAAKC,GAC7B,IAAMwG,EAAY,GACZ5K,EAAS,GACT6K,EAAgB,GAChBzL,EAAO,GACPqF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc/E,YACtCqL,EAAwBpG,EAAcjF,YACtClP,EAAUiU,EAAcjU,KAAxB,UAAsCmU,EAAcnU,KAG1D,IAAKuO,EAAWoF,EAAI6G,eAAe9J,MAAM,KAAKoF,OAAQlC,EAAI4G,eAAe9J,MAAM,KAAKoF,QAChF,OAAO,KAiBX,SAAS2E,EAAkBC,EAAIxL,EAAWyL,GACtCpK,EAAmBmK,EAAGjG,aAAa,SAAChV,GAChC,IAAMoV,EAAQ,GACV+F,EAAW,GACfP,EAAc1N,SAAQ,SAACkO,GACnB,IAAMna,EAAQwO,EAAU2L,GAAY9F,aAAanG,KAAKnP,GACtDmb,OAAgBla,EAChBmU,EAAMgG,GAAcna,KAEnB0Z,EAAUQ,KACPD,GAAW/L,EAAK3E,KAAK4K,GACzBuF,EAAUQ,IAAY,MASlC,OAjCCjH,EAAI6G,eAAe9J,MAAM,KAAM/D,SAAQ,SAACmL,GACrC,IAAM1I,EAAQkL,EAAsBxC,GACpCtI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B6K,EAAcpQ,KAAKmF,EAAMI,SAASxP,SA2BtCya,EAAkB7G,EAAK2G,GAAuB,GAC9CE,EAAkB9G,EAAK2G,GAAuB,GAEvC,IAAI7Y,GAAUmN,EAAMY,EAAQ,CAAExP,S,sPC5DjC+C,GAAgDD,EAAhDC,IAAKC,GAA2CF,EAA3CE,IAAKG,GAAsCL,EAAtCK,MAAOC,GAA+BN,EAA/BM,KAAMC,GAAyBP,EAAzBO,MAAOC,GAAkBR,EAAlBQ,IAAKL,GAAaH,EAAbG,IAAKC,GAAQJ,EAARI,IAEhD,SAAS4X,GAAkBlF,GACvB,OAAOA,EAAIgD,QAAO,SAAA/K,GAAA,QAAUA,aAAgBkD,MAShD,SAASgK,GAAKnF,GACV,GAAI1H,EAAQ0H,MAAUA,EAAI,aAAc/I,OAAQ,CAC5C,IAAMmO,EAAiBF,GAAkBlF,GAIzC,OAHiBoF,EAAexV,OACZwV,EAAevD,QAAO,SAACC,EAAKuD,GAAN,OAAevD,EAAMuD,IAAM,GAC/ClK,EAAkBK,KAG5C,OAAOL,EAAkBK,KAU7B,SAAS8J,GAAKtF,GACV,GAAI1H,EAAQ0H,MAAUA,EAAI,aAAc/I,OAAQ,CAC5C,IAAMsO,EAAWJ,GAAInF,GACfpJ,EAAMoJ,EAAIpQ,QAAU,EAC1B,OAAQ+G,OAAO6O,MAAMD,IAAaA,aAAoBpK,EAC7CA,EAAkBK,KAAO+J,EAAW3O,EAEjD,OAAOuE,EAAkBK,KAgG7B,IAAMiK,aACDtY,GAAMgY,IADL,MAED/X,GAAMkY,IAFL,MAGDjY,IAzFL,SAAc2S,GACV,GAAI1H,EAAQ0H,MAAUA,EAAI,aAAc/I,OAAQ,CAE5C,IAAMyO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe9V,OAAUqC,KAAK0T,IAAL,MAAA1T,KAAA,GAAYyT,IAAkBvK,EAAkBK,KAErF,OAAOL,EAAkBK,QA+EvB,MAIDlO,IAzEL,SAAc0S,GACV,GAAI1H,EAAQ0H,MAAUA,EAAI,aAAc/I,OAAQ,CAE5C,IAAMyO,EAAiBR,GAAkBlF,GAEzC,OAAQ0F,EAAe9V,OAAUqC,KAAK2T,IAAL,MAAA3T,KAAA,GAAYyT,IAAkBvK,EAAkBK,KAErF,OAAOL,EAAkBK,QA8DvB,MAKDjO,IAzDL,SAAgByS,GACZ,OAAOA,EAAI,MAmDT,MAMDxS,IA/CL,SAAewS,GACX,OAAOA,EAAIA,EAAIpQ,OAAS,MAwCtB,MAODnC,IArCL,SAAgBuS,GACZ,OAAI1H,EAAQ0H,GACDA,EAAIpQ,OAERuL,EAAkBK,QA0BvB,MAQD9N,IAbL,SAAcsS,GACV,OAAO/N,KAAK4T,KAbhB,SA
AmB7F,GACf,IAAI8F,EAAOR,GAAItF,GACf,OAAOsF,GAAItF,EAAI5J,KAAI,SAAA2P,GAAA,gBAAQA,EAAMD,EAAS,OAWzBE,CAAShG,OAIxB,IAWAiG,GAAqB9Y,G,0PCzCnB0J,GAjGFqP,G,WACF,aAAe,Y,4FAAA,SACXjY,KAAK4I,MAAQ,IAAIqK,IACjBjT,KAAK4I,MAAM0K,IAAI,aAAc4E,IAE7B5b,OAAO6b,QAAQX,IAAQ1O,SAAQ,SAAC3L,GAC5B,EAAKyL,MAAM0K,IAAInW,EAAI,GAAIA,EAAI,O,oDAc/B,IAAK,UAAOwE,OACR,OAAO3B,KAAK4I,MAAMnM,IAAI,cAG1B,IAAI2b,EAAUA,UAAVA,8BAEJ,GAAuB,mBAAZA,EACPpY,KAAK4I,MAAM0K,IAAI,aAAc8E,OAC1B,CAEH,GADAA,EAAU3R,OAAO2R,IAC6B,IAA1C9b,OAAO0J,KAAKwR,IAAQrR,QAAQiS,GAG5B,MAAM,IAAI1H,MAAJ,WAAqB0H,EAArB,0BAFNpY,KAAK4I,MAAM0K,IAAI,aAAckE,GAAOY,IAK5C,OAAOpY,O,+BAmCD7D,EAAMic,GAAS,WACrB,GAAuB,mBAAZA,EACP,MAAM,IAAI1H,MAAM,gCAMpB,OAHAvU,EAAOsK,OAAOtK,GACd6D,KAAK4I,MAAM0K,IAAInX,EAAMic,GAEd,WAAQ,EAAKC,aAAalc,M,mCAGvBA,GACN6D,KAAK4I,MAAMyK,IAAIlX,IACf6D,KAAK4I,MAAM0P,OAAOnc,K,8BAIjBA,GACL,OAAIA,aAAgBoc,SACTpc,EAEJ6D,KAAK4I,MAAMnM,IAAIN,O,KAgBfqc,IARO,QAHd5P,GAAQ,QAIJA,GAAQ,IAAIqP,IAETrP,I,+YC5Cf,SAAS6P,GAASC,EAAWvN,EAAUwN,EAAUC,GAC7C,IAAMC,EAxDV,SAAsBH,EAAWvN,GAC7B,IAAMuE,EAAS,GAEToJ,EADaJ,EAAUrI,gBACCxE,eAY9B,OAVAvP,OAAO6b,QAAQW,GAAYhQ,SAAQ,YAAW,IAAT3L,EAAS,WACtCgO,GAAYA,EAASxJ,QACU,IAA3BwJ,EAAShF,QAAQhJ,IACjBuS,EAAOtJ,KAAKjJ,GAGhBuS,EAAOtJ,KAAKjJ,MAIbuS,EAyCWqJ,CAAYL,EAAWvN,GACnC6N,EAhCV,SAAwBN,GAA0B,IAAfC,EAAe,uDAAJ,GACpC3C,EAAS,GAETiD,EADaP,EAAUrI,gBACD7E,aACtB0M,EAAaM,GAAaU,iBAchC,OAZA5c,OAAO0J,KAAKiT,GAAUnQ,SAAQ,SAACqQ,GACU,iBAA1BR,EAASQ,KAChBR,EAASQ,GAAeF,EAASE,GAAaC,YAElD,IAAMC,EAAYb,GAAac,QAAQX,EAASQ,IAC5CE,EACArD,EAAOmD,GAAeE,GAEtBrD,EAAOmD,GAAejB,EACtBS,EAASQ,GAAenB,OAGzBhC,EAcYuD,CAAcb,EAAWC,GACtC1N,EAAayN,EAAUrI,gBACvBmJ,EAAgBvO,EAAWI,YAC3BoO,EAASxO,EAAW9O,KACpBud,EAAe,GACfC,EAAa,GACbhO,EAAS,GACTqH,EAAU,GACVjI,EAAO,GACT6O,SAGJtd,OAAO6b,QAAQqB,GAAe1Q,SAAQ,YAAkB,cAAhB3L,EAAgB,KAAXN,EAAW,KACpD,IAAgC,IAA5Bgc,EAAU1S,QAAQhJ,IAAe6b,EAAW7b,GAG5C,OAFAwO,EAAOvF,KAAKqD,EAAQ,GAAI5M,EAAM8O,WAEtB9O,EAAM8O,SAASC,MACvB,KAAKlN,EAAUC,QACXgb,EAAWvT,KAAKjJ,GAChB,MACJ,QACA,KAAKuB,EAAUE,UACX8a,EAAatT,KAAKjJ,OAK9B,IAAI0c,EAAW,EACfnN,EAAmBgM,EAAU9H,aAAa,SAAChV,GACvC,IAAIke,EAAO,GACXJ,EAAa5Q,SAAQ,SAACiR,GAClBD,EAAUA,EAAV,IAAkBN,EAAcO,GAAG7I,aAAanG,KAAKnP,WAEnCsE,IAAlB8S,EAAQ8G,IACR9G,EAAQ8G,GAAQD,EAChB9O,EAAK3E,KAAK,IACVsT,EAAa5Q,SAAQ,SAACiR,GAClBhP,EAAK8O,GAAUE,GAAKP,EAAcO,GAAG7I,aAAanG,KAAKnP,MAE3D+d,EAAW7Q,SAAQ,SAACiR,GAChBhP,EAAK8O,GAAUE,GAAK,CAACP,EAAcO,GAAG7I,aAAanG,KAAKnP,OAE5Die,GAAY,GAEZF,EAAW7Q,SAAQ,SAACiR,GAChBhP,EAAKiI,EAAQ8G,IAAOC,GAAG3T,KAAKoT,EAAcO,GAAG7I,aAAanG,KAAKnP,UAM3E,IAAIoe,EAAc,GACdC,EAAgB,kBAAMvB,EAAUnH,gBAcpC,OAbAxG,EAAKjC,SAAQ,SAACyM,GACV,IAAMvE,EAAQuE,EACdoE,EAAW7Q,SAAQ,SAACiR,GAChB/I,EAAM+I,GAAKf,EAAWe,GAAGxE,EAAIwE,GAAIE,EAAeD,SAGpDpB,GACAA,EAAkBsB,wBAClBN,EAAehB,GAGfgB,EAAe,IAAIhc,GAAUmN,EAAMY,EAAQ,CAAExP,KAAMsd,IAEhDG,EC9HJ,SAASO,GAAmBrK,EAAKC,GACpC,IAIMqK,EAAkB7K,EAJFO,EAAIO,gBACJN,EAAIM,iBAK1B,OAAO,SAACe,EAAWE,GACf,IAAI+I,GAAc,EASlB,OARAD,EAAgBtR,SAAQ,SAACmL,GAGjBoG,IAFAjJ,EAAU6C,GAAWqG,gBACrBhJ,EAAU2C,GAAWqG,gBAAiBD,MAMvCA,GCjBR,SAASE,GAAOzK,EAAKC,GACxB,IAAMwG,EAAY,GACZ5K,EAAS,GACT6K,EAAgB,GAChBzL,EAAO,GACPqF,EAAgBN,EAAIO,gBACpBC,EAAgBP,EAAIM,gBACpBoG,EAAwBrG,EAAc/E,YACtCqL,EAAwBpG,EAAcjF,YACtClP,EAAUiU,EAAcjU,KAAxB,UAAsCmU,EAAcnU,KAG1D,IAAKuO,EAAWoF,EAAI6G,eAAe9J,MAAM,KAAKoF,OAAQlC,EAAI4G,eAAe9J,MAAM,KAAKoF,QAChF,OAAO,KAgBX,SAAS2E,EAAmBC,EAAIxL,GAC5BqB,EAAmBmK,EAAGjG,aAAa,SAAChV,GAChC,IAAMoV,EAAQ,GACV+F,EAAW,GACfP,EAAc1N,SAAQ,SAACkO,GACnB,IAAMna,EAAQwO,EAAU2L,GAAY9F,aAAanG,KAAKnP,GACtDmb,OAAgBla,EAChBmU,EAAMgG,GAAcna,KAEnB0Z,EAAUQ,KACXhM,EAAK3E,KAAK4K,GACVuF,EAAUQ,IAAY,MASlC,OAhCCjH,EAAI6G,eAAe9J,MAAM,KAAM/D,SAAQ,SAACmL,GACrC,IAAM1I,E
AAQkL,EAAsBxC,GACpCtI,EAAOvF,KAAKqD,EAAQ,GAAI8B,EAAMI,WAC9B6K,EAAcpQ,KAAKmF,EAAMI,SAASxP,SA0BtCya,EAAkB9G,EAAK2G,GACvBG,EAAkB7G,EAAK2G,GAEhB,IAAI9Y,GAAUmN,EAAMY,EAAQ,CAAExP,SCvDlC,SAASqe,GAAeC,EAAYC,EAAY1K,GACnD,OAAOH,EAAa4K,EAAYC,EAAY1K,GAAU,EAAOhB,EAAME,WAGhE,SAASyL,GAAgBF,EAAYC,EAAY1K,GACpD,OAAOH,EAAa6K,EAAYD,EAAYzK,GAAU,EAAOhB,EAAMG,Y,8PCFlDyL,G,WAUjB,WAAaze,EAAM4O,EAAMY,EAAQvJ,I,4FAAQ,SACrCpC,KAAK7D,KAAOA,EACZ6D,KAAK2L,OAASA,EACd3L,KAAKoC,OAASA,EACdpC,KAAK+K,KAAO/K,KAAK6a,UAAU9P,G,6CAUpBA,GAAM,WACb,OAAOA,EAAK5C,KAAI,SAAAgL,GAAA,OAAS,EAAK/Q,OAAOuE,MAAMwM,EAAO,CAAEpT,OAAQ,EAAK4L,OAAO5L,gB,+PCX3D+a,G,WAQjB,WAAa5J,EAAcvE,I,4FAAY,SACnC3M,KAAKkR,aAAeA,EACpBlR,KAAK2M,WAAaA,E,4CAclB,MAAM,IAAI+D,MAAM,yB,+BAUhB,OAAO1Q,KAAKkR,aAAavF,S,6BAUzB,OAAO3L,KAAKkR,aAAa/U,O,6BAUzB,OAAO6D,KAAKkR,aAAavF,OAAOC,O,gCAUhC,OAAO5L,KAAKkR,aAAavF,OAAOoP,U,oCAUhC,OAAO/a,KAAKkR,aAAavF,OAAOqP,c,oCAUhC,OAAOhb,KAAKkR,aAAavF,OAAOsP,aAAejb,KAAKkR,aAAavF,OAAOxP,O,6BASpE,WACE4O,EAAO,GAIb,OAHA2B,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjCmP,EAAK3E,KAAK,EAAK8K,aAAanG,KAAKnP,OAE9BmP,I,sCAUP,MAAM,IAAI2F,MAAM,0B,gCA9FhB,MAAM,IAAIA,MAAM,yB,8BAyIhB,MAvCgB,CACZwK,QAAS,GACTC,SAAUnb,KACViU,UAHY,SAGF9X,GAEN,OADA6D,KAAKkb,QAAQ/e,KAAOA,EACb6D,MAEX2L,OAPY,SAOLA,GAEH,OADA3L,KAAKkb,QAAQvP,OAASA,EACf3L,MAEX+K,KAXY,SAWPA,GAED,OADA/K,KAAKkb,QAAQnQ,KAAOA,EACb/K,MAEXkR,aAfY,SAeCA,GAET,OADAlR,KAAKkb,QAAQhK,aAAeA,EACrBlR,MAEX2M,WAnBY,SAmBDA,GAEP,OADA3M,KAAKkb,QAAQvO,WAAaA,EACnB3M,MAEXob,MAvBY,WAwBR,IAAIlK,EAAe,KACnB,GAAIlR,KAAKkb,QAAQhK,wBAAwB0J,GACrC1J,EAAelR,KAAKkb,QAAQhK,iBACzB,KAAIlR,KAAKkb,QAAQvP,SAAU3L,KAAKkb,QAAQnQ,KAO3C,MAAM,IAAI2F,MAAM,4BANhBQ,EAAe,IAAI0J,GAAa5a,KAAKkb,QAAQ/e,KACzB6D,KAAKkb,QAAQnQ,KACb/K,KAAKkb,QAAQvP,OACb3L,KAAKmb,SAAS/Y,UAKtC,OAAO,IAAIpC,KAAKmb,SAASjK,EAAclR,KAAKkb,QAAQvO,kB,+PCjK/C0O,G,stBAYb,OAHKrb,KAAKsb,gBACNtb,KAAKsb,cAAgBtb,KAAKub,uBAEvBvb,KAAKsb,gB,4CAUZ,MAAM,IAAI5K,MAAM,yB,sCAWhB,OAAO1Q,KAAK+K,W,GAjCmB+P,I,0PCElBU,G,stBAYb,OAHKxb,KAAKsb,gBACNtb,KAAKsb,cAAgBtb,KAAKub,uBAEvBvb,KAAKsb,gB,6BAUZ,OAAOtb,KAAKkR,aAAavF,OAAO8P,O,iCAUhC,OAAOzb,KAAKkR,aAAavF,OAAOyN,UAAYpB,K,qCAShC,IACJ0D,EAAiB1b,KAAKkR,aAAavF,OAAnC+P,aACR,OAAOA,aAAwBnD,SAAWmD,EAAe7Q,I,4CAUzD,MAAM,IAAI6F,MAAM,yB,sCAWhB,OAAO1Q,KAAK+K,W,GAhEiB+P,I,0PCLhBa,G,yKAQb,MAAM,IAAIjL,MAAM,2B,+PCJHkL,G,mtBAQVza,GAQH,OALK+L,EAAkB2O,UAAU1a,GAGpB+L,EAAkB4O,eAAe3a,GAFjCsF,OAAOtF,GAAK4a,W,GAZcJ,I,0PCC1BK,G,utBASb,OAAO5d,EAAiBC,c,4CAUL,WACbyb,EAAO,IAAImC,IACXC,EAAS,GAUf,OAPAxP,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMuX,EAAQ,EAAKjC,aAAanG,KAAKnP,GAChCke,EAAKzG,IAAIF,KACV2G,EAAKqC,IAAIhJ,GACT+I,EAAO9V,KAAK+M,OAGb+I,K,gCAIP,OAAO,IAAIN,O,GAnCsBP,I,0PCApBe,G,mtBASVjb,E,GAAiB,IAAVpB,EAAU,EAAVA,OACN6D,SAKJ,GAHK5D,KAAKqc,OACNrc,KAAKqc,KAAO,IAAIvc,EAAkBC,IAEjCmN,EAAkB2O,UAAU1a,GAI7ByC,EAASsJ,EAAkB4O,eAAe3a,OAJP,CACnC,IAAIhB,EAAaH,KAAKqc,KAAK5T,cAActH,GACzCyC,EAASzD,EAAaA,EAAWoK,UAAY2C,EAAkBM,GAInE,OAAO5J,M,GArB6B+X,ICR/BW,GAA4B,SAACvR,EAAM4B,GAC5C,IAAI+K,EAAMhP,OAAO6T,kBACb5E,EAAMjP,OAAO8T,kBAiBjB,OAdA9P,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMuX,EAAQpI,EAAKnP,GACfuX,aAAiBjG,IAIjBiG,EAAQuE,IACRA,EAAMvE,GAENA,EAAQwE,IACRA,EAAMxE,OAIP,CAACuE,EAAKC,I,0PCRI8E,G,YAQjB,WAAavL,EAAcvE,I,4FAAY,e,iKAAA,wDAC7BuE,EAAcvE,IADe,OAGnC,EAAK+P,eAAiB,KAHa,E,wXAcnC,OAAOJ,GAA0Btc,KAAKkR,aAAanG,KAAM/K,KAAK2M,c,qDAW9D,GAAI3M,KAAK0c,eACL,OAAO1c,KAAK0c,eAUhB,IAPA,IAAMC,EAAa3c,KAAK+K,OAAOgK,QAAO,SAAA/K,GAAA,QAAUA,aAAgBkD,MAAoB+E,MAAK,SAACjP,EAAGO,GAAJ,OAAUP,EAAIO,KACjGqZ,EAAQD,EAAWhb,OACrBkb,EAAUnU,OAAO6T,kBACjBO,SACAC,SACAC,EAAiB,EAEZphB,EAAI,EAAGA,EAAIghB,EAAOhhB,IACvBkhB,EAAYH,EAAW/gB,EAAI,IAC3BmhB,EAAYJ,EAAW/gB,MAELkhB,IAIlBD,EAA
U7Y,KAAK0T,IAAImF,EAASE,EAAYJ,EAAW/gB,EAAI,IACvDohB,KAQJ,OALKA,IACDH,EAAU,MAEd7c,KAAK0c,eAAiBG,EAEf7c,KAAK0c,iB,+BAUZ,OAAO1c,KAAKkR,aAAavF,OAAO5L,S,sCAUnB,WACPgL,EAAO,GACPkS,EAAajd,KAAKD,SAaxB,OAXA2M,EAAmB1M,KAAK2M,YAAY,SAAC/Q,GACjC,IAAMuX,EAAQ,EAAKjC,aAAanG,KAAKnP,GAErC,GAAIsR,EAAkB2O,UAAU1I,KAAY8J,GAAcvU,OAAOrH,SAAS8R,GAAS,CAE/E,IAAM+J,EAAchQ,EAAkB4O,eAAe3I,IAAUA,EAC/DpI,EAAK3E,KAAK8W,QAEVnS,EAAK3E,KAAKtG,EAAkBwG,SAAS6M,EAAO8J,OAG7ClS,K,gCAIP,OAAO,IAAIqR,O,GApGmBf,I,0PCJjB8B,G,mtBAQVhc,GAEHA,EAAMsF,OAAOtF,GACb,IAAIyC,SAEJ,GAAKsJ,EAAkB2O,UAAU1a,GAK7ByC,EAASsJ,EAAkB4O,eAAe3a,OALP,CACnC,IAAIic,EAAUjc,EAAIoH,MALR,2DAMV3E,EAASwZ,EAAa1U,OAAO2U,WAAWD,EAAQ,IAAvC,IAA8C1U,OAAO2U,WAAWD,EAAQ,IAC9DlQ,EAAkBM,GAIzC,OAAO5J,M,GApB2B+X,I,0PCArB2B,G,muBASb,IAAMC,EAAUvd,KAAKkR,aAAavF,OAAO6R,KACzC,MAAO,CAACD,EAAQ,GAAIA,EAAQA,EAAQ5b,OAAS,M,6BAU7C,OAAO3B,KAAKkR,aAAavF,OAAO6R,Q,gCAIhC,OAAO,IAAIL,O,GAxBiB9B,I,0PCAfoC,G,mtBAQVtc,GACH,IAAIyC,SAEJ,GAAKsJ,EAAkB2O,UAAU1a,GAI7ByC,EAASsJ,EAAkB4O,eAAe3a,OAJP,CACnC,IAAIC,EAAYic,WAAWlc,EAAK,IAChCyC,EAAS8E,OAAO6O,MAAMnW,GAAa8L,EAAkBM,GAAKpM,EAI9D,OAAOwC,M,GAjB+B+X,I,0PCEzB+B,G,utBASb,OAAOlf,EAAeC,a,4CAWtB,OAAO6d,GAA0Btc,KAAKkR,aAAanG,KAAM/K,KAAK2M,e,gCAI9D,OAAO,IAAI8Q,O,GAxBqBjC,I,0PCLlCmC,G,WACF,c,4FAAc,SACV3d,KAAK4d,WAAa,IAAI3K,I,qDAGR8H,EAAS8C,GAEvB,OADA7d,KAAK4d,WAAWtK,IAAIyH,EAAS8C,GACtB7d,O,0BAGP4L,GACA,OAAO5L,KAAK4d,WAAWvK,IAAIzH,K,0BAG3BA,GACA,OAAO5L,KAAK4d,WAAWnhB,IAAImP,O,KAI7BkS,GAAwB,SAAClV,GAC3BA,EACiBmV,kBAAkB3f,EAAiBC,YAAa2d,IAChD+B,kBAAkB3f,EAAiBE,SAAUme,IAC7CsB,kBAAkB3f,EAAiBG,OAAQ+e,IAC3CS,kBAAkBvf,EAAeC,WAAYif,KAanDM,GAVQ,WACnB,IAAIpV,EAAQ,KAMZ,OAAOA,IAJHA,EAAQ,IAAI+U,GACZG,GAAsBlV,GACfA,GALQ,GCgChB,SAASqV,GAAaC,EAAYvS,EAAQwS,GAC7C,IAAMC,EAAa,GAUnB,OARMD,GAAWA,EAAQxc,SACrBwc,EAAUxS,EAAOxD,KAAI,SAAA6B,GAAA,OAAQA,EAAK7N,SAGtCgiB,EAAQrV,SAAQ,SAACuV,EAAQziB,GACrBwiB,EAAWC,GAAUziB,KAGlB+P,EAAOxD,KAAI,SAAA6B,GAAA,OAnEtB,SAAyBe,EAAMY,GAG3B,OAFAZ,EAAOA,GAAQ,GAEXiT,GAAc3K,IAAI1H,EAAOoP,SAClBiD,GAAcvhB,IAAIkP,EAAOoP,SACfuD,QACArK,UAAUtI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WALV,MAK0B5B,EAAKpJ,OAAS,IAC9ByZ,QAEd4C,GACUvhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrFigB,QACArK,UAAUtI,EAAOxP,MACjBwP,OAAOA,GACPZ,KAAKA,GACL4B,WANV,MAM0B5B,EAAKpJ,OAAS,IAC9ByZ,QAgDSmD,CAAgBL,EAAWE,EAAWpU,EAAK7N,OAAQ6N,MC3ElE,QACXiT,WAAYlf,EAAWI,M,0PCANqgB,G,WACjB,WAAY5S,I,4FAAM,SACd5L,KAAKye,MAAQ7S,E,6CAQb,MAAM,IAAI8E,MAAM,qC,2BAJhB,OAAO1Q,KAAKye,U,KCThBC,GAAM,GACNC,GAAM,GACNC,GAAQ,GACRC,GAAU,GACVC,GAAS,GAEb,SAASC,GAAgBC,GACvB,OAAO,IAAIzG,SAAS,IAAK,WAAayG,EAAQ7W,KAAI,SAAShM,EAAMP,GAC/D,OAAOqjB,KAAKC,UAAU/iB,GAAQ,OAASP,EAAI,aAC1CqH,KAAK,KAAO,KAWjB,SAASkc,GAAaC,GACpB,IAAIC,EAAY/iB,OAAOY,OAAO,MAC1B8hB,EAAU,GAUd,OARAI,EAAKtW,SAAQ,SAASyM,GACpB,IAAK,IAAI+J,KAAU/J,EACX+J,KAAUD,GACdL,EAAQ5Y,KAAKiZ,EAAUC,GAAUA,MAKhCN,EAGT,SAASnf,GAAIhD,EAAO0iB,GAClB,IAAI5hB,EAAId,EAAQ,GAAI8E,EAAShE,EAAEgE,OAC/B,OAAOA,EAAS4d,EAAQ,IAAIvW,MAAMuW,EAAQ5d,EAAS,GAAGsB,KAAK,GAAKtF,EAAIA,EAStE,SAAS6hB,GAAW7f,GAClB,IAPkBwE,EAOd3B,EAAQ7C,EAAK8f,cACbC,EAAU/f,EAAKggB,gBACfC,EAAUjgB,EAAKkgB,gBACfC,EAAengB,EAAKogB,qBACxB,OAAOxI,MAAM5X,GAAQ,iBAXHwE,EAYDxE,EAAKqgB,kBAXR,EAAI,IAAMngB,IAAKsE,EAAM,GAC/BA,EAAO,KAAO,IAAMtE,GAAIsE,EAAM,GAC9BtE,GAAIsE,EAAM,IAS+B,IAAMtE,GAAIF,EAAKsgB,cAAgB,EAAG,GAAK,IAAMpgB,GAAIF,EAAKugB,aAAc,IAC1GJ,EAAe,IAAMjgB,GAAI2C,EAAO,GAAK,IAAM3C,GAAI6f,EAAS,GAAK,IAAM7f,GAAI+f,EAAS,GAAK,IAAM/f,GAAIigB,EAAc,GAAK,IACnHF,EAAU,IAAM/f,GAAI2C,EAAO,GAAK,IAAM3C,GAAI6f,EAAS,GAAK,IAAM7f,GAAI+f,EAAS,GAAK,IAChFF,GAAWld,EAAQ,IAAM3C,GAAI2C,EAAO,GAAK,IAAM3C,GAAI6f,EAAS,GAAK,IACjE,IAGO,gBAASS,GACtB,IAAIC,EAAW,IAAIhgB,
OAAO,KAAQ+f,EAAY,SAC1CE,EAAYF,EAAUG,WAAW,GAWrC,SAASC,EAAUjgB,EAAMgU,GACvB,IAIIxX,EAJAsiB,EAAO,GACPoB,EAAIlgB,EAAKqB,OACT8e,EAAI,EACJpjB,EAAI,EAEJqjB,EAAMF,GAAK,EACXG,GAAM,EAMV,SAASta,IACP,GAAIqa,EAAK,OAAO/B,GAChB,GAAIgC,EAAK,OAAOA,GAAM,EAAOjC,GAG7B,IAAI9iB,EAAUK,EAAP2kB,EAAIH,EACX,GAAIngB,EAAKggB,WAAWM,KAAOhC,GAAO,CAChC,KAAO6B,IAAMD,GAAKlgB,EAAKggB,WAAWG,KAAO7B,IAASte,EAAKggB,aAAaG,KAAO7B,KAI3E,OAHKhjB,EAAI6kB,IAAMD,EAAGE,GAAM,GACdzkB,EAAIqE,EAAKggB,WAAWG,QAAU5B,GAAS8B,GAAM,EAC9C1kB,IAAM6iB,KAAU6B,GAAM,EAAUrgB,EAAKggB,WAAWG,KAAO5B,MAAW4B,GACpEngB,EAAK+T,MAAMuM,EAAI,EAAGhlB,EAAI,GAAG2E,QAAQ,MAAO,KAIjD,KAAOkgB,EAAID,GAAG,CACZ,IAAKvkB,EAAIqE,EAAKggB,WAAW1kB,EAAI6kB,QAAU5B,GAAS8B,GAAM,OACjD,GAAI1kB,IAAM6iB,GAAU6B,GAAM,EAAUrgB,EAAKggB,WAAWG,KAAO5B,MAAW4B,OACtE,GAAIxkB,IAAMokB,EAAW,SAC1B,OAAO/f,EAAK+T,MAAMuM,EAAGhlB,GAIvB,OAAO8kB,GAAM,EAAMpgB,EAAK+T,MAAMuM,EAAGJ,GAGnC,IA7BIlgB,EAAKggB,WAAWE,EAAI,KAAO3B,MAAW2B,EACtClgB,EAAKggB,WAAWE,EAAI,KAAO1B,MAAU0B,GA4BjC1jB,EAAIuJ,OAAasY,IAAK,CAE5B,IADA,IAAIpJ,EAAM,GACHzY,IAAM4hB,IAAO5hB,IAAM6hB,IAAKpJ,EAAInP,KAAKtJ,GAAIA,EAAIuJ,IAC5CiO,GAA4B,OAAtBiB,EAAMjB,EAAEiB,EAAKlY,OACvB+hB,EAAKhZ,KAAKmP,GAGZ,OAAO6J,EAGT,SAASyB,EAAczB,EAAMJ,GAC3B,OAAOI,EAAKjX,KAAI,SAASoN,GACvB,OAAOyJ,EAAQ7W,KAAI,SAASmX,GAC1B,OAAOwB,EAAYvL,EAAI+J,OACtBrc,KAAKkd,MAkBZ,SAASY,EAAUxL,GACjB,OAAOA,EAAIpN,IAAI2Y,GAAa7d,KAAKkd,GAGnC,SAASW,EAAYjkB,GACnB,OAAgB,MAATA,EAAgB,GACjBA,aAAiB+C,KAAO4f,GAAW3iB,GACnCujB,EAASY,KAAKnkB,GAAS,IAAM,IAAOA,EAAM0D,QAAQ,KAAM,MAAU,IAClE1D,EAGR,MAAO,CACL8J,MA5FF,SAAerG,EAAMgU,GACnB,IAAI2M,EAASjC,EAASI,EAAOmB,EAAUjgB,GAAM,SAASiV,EAAK3Z,GACzD,GAAIqlB,EAAS,OAAOA,EAAQ1L,EAAK3Z,EAAI,GACrCojB,EAAUzJ,EAAK0L,EAAU3M,EAtD/B,SAAyB0K,EAAS1K,GAChC,IAAIhX,EAASyhB,GAAgBC,GAC7B,OAAO,SAASzJ,EAAK3Z,GACnB,OAAO0Y,EAAEhX,EAAOiY,GAAM3Z,EAAGojB,IAmDMkC,CAAgB3L,EAAKjB,GAAKyK,GAAgBxJ,MAGzE,OADA6J,EAAKJ,QAAUA,GAAW,GACnBI,GAuFPmB,UAAWA,EACXxgB,OA5BF,SAAgBqf,EAAMJ,GAEpB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrC,CAACJ,EAAQ7W,IAAI2Y,GAAa7d,KAAKkd,IAAYgB,OAAON,EAAczB,EAAMJ,IAAU/b,KAAK,OA2B5Fme,WAxBF,SAAoBhC,EAAMJ,GAExB,OADe,MAAXA,IAAiBA,EAAUG,GAAaC,IACrCyB,EAAczB,EAAMJ,GAAS/b,KAAK,OAuBzCoe,WApBF,SAAoBjC,GAClB,OAAOA,EAAKjX,IAAI4Y,GAAW9d,KAAK,OAoBhC8d,UAAWA,EACXD,YAAaA,IC3GFQ,OAnCf,SAAgBvP,EAAKpG,EAAQ9E,GACzB,IAAKmC,MAAMqB,QAAQsB,GACf,MAAM,IAAI+E,MAAM,iDAEpB,IAGM6Q,EAAe5V,EAAOxD,KAAI,SAAAqZ,GAAA,OAAcA,EAAWrlB,QACzD0K,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBqU,gBAAgB,GAGuB5a,GAE3C,IAAMmY,EAAU,GACV5Y,EAAOsb,EAAY1C,GAErBb,EAAUoD,EACV1a,EAAQ4a,iBAGRtD,EAAUpM,EAAIvK,OAAO,EAAG,GAAG,IAG/B,IAAMma,EAAYxD,EAAQvK,QAAO,SAACC,EAAK+N,EAAGhmB,GAAT,OAC7BU,OAAO8Q,OAAOyG,G,EAAYjY,G,EAAJgmB,K,EAAtB,I,sGACD,IAUH,OARA7P,EAAIjJ,SAAQ,SAACD,GACT,IAAM0C,EAAQ,GAKd,OAJAgW,EAAazY,SAAQ,SAAC+Y,GAClB,IAAMC,EAAYH,EAAUE,GAC5BtW,EAAMnF,KAAKyC,EAAOiZ,OAEf1b,eAAQmF,MAEZ,CAACgW,EAAcvC,IChBX+C,OAXf,SAAiB5X,EAAKwB,EAAQ9E,GAK1BA,EAAUvK,OAAO8Q,OAAO,GAJF,CAClBqU,gBAAgB,EAChBO,eAAgB,KAEuBnb,GAE3C,IAAMob,EAAMC,GAAMrb,EAAQmb,gBAC1B,OAAOV,GAAOW,EAAI1B,UAAUpW,GAAMwB,EAAQ9E,I,0PC5BzBsb,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJpkB,EAAWE,U,0WAGb8M,EAAMY,EAAQ9E,GAClB,OAAOkb,GAAOhX,EAAMY,EAAQ9E,O,GANY2X,ICqDjC4D,OA7Bf,SAAmBrQ,EAAKpG,GACpB,IAAK3C,MAAMqB,QAAQsB,GACf,MAAM,IAAI+E,MAAM,iDAGpB,IAAM2N,EAAS,GACXziB,EAAI,EACJymB,SACErD,EAAU,GACV5Y,EAAOsb,EAAY1C,GACnBsD,EAAmB3W,EAAOxD,KAAI,SAAAqZ,GAAA,OAAcA,EAAWrlB,QAgB7D,OAdA4V,EAAIjJ,SAAQ,SAACkB,GACT,IAAMnB,EAAS,GACfyZ,EAAiBxZ,SAAQ,SAAC0Y,GAClBA,KAAcnD,EACdgE,EAAiBhE,EAAOmD,IAExBnD,EAAOmD,GAAc5lB,IACrBymB,EAAiBzmB,EAAI,GAEzBiN,EAAOwZ,GAAkBrY,EAAKwX,MAElCpb,eAAQyC,MAGL,CAACvM,OAAO0J,KAAKqY,GAASW,I,0PClDZuD,G,YACjB,aAAc,O,4FAAA,S,iKAAA
,wDACJxkB,EAAWC,Y,0WAGb+M,EAAMY,EAAQ9E,GAClB,OAAOub,GAASrX,EAAMY,EAAQ9E,O,GANK2X,I,0PCAtBgE,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJzkB,EAAWG,U,0WAGb6M,EAAMY,EAAQ9E,GAClB,OAAOya,GAAOvW,EAAMY,EAAQ9E,O,GANW2X,ICmBhCiE,OAXf,SAAe1X,EAAMY,EAAQ9E,GACzB,IAAM6b,EAAa,CAAEN,YAAUL,UAAQT,WACjCrE,EAAanS,EAAiBC,GAEpC,IAAKkS,EACD,MAAM,IAAIvM,MAAM,mCAGpB,OAAOgS,EAAWzF,GAAYlS,EAAMY,EAAQ9E,I,0PChB3B8b,G,YACjB,aAAc,O,4FAAA,S,iKAAA,wDACJ5kB,EAAWI,O,0WAGb4M,EAAMY,EAAQ9E,GAClB,OAAO1I,GAAK4M,EAAMY,EAAQ9E,O,GANa2X,I,gQCDzCoE,G,WACF,c,4FAAc,SACV5iB,KAAK4I,MAAQ,IAAIqK,IACjBjT,KAAK0iB,WAAW1iB,KAAK6iB,yB,2DAIrB,MAAO,CACH,IAAIV,GACJ,IAAIK,GACJ,IAAID,GACJ,IAAII,M,mCASgB,WAAjBD,EAAiB,uDAAJ,GAEpB,OADAA,EAAW5Z,SAAQ,SAAAga,GAAA,OAAa,EAAKla,MAAM0K,IAAIwP,EAAUlX,KAAMkX,MACxD9iB,KAAK4I,Q,+BAQPka,GACL,OAAIA,aAAqBtE,IACrBxe,KAAK4I,MAAM0K,IAAIwP,EAAUlX,KAAMkX,GACxB9iB,MAEJ,O,iCASA8iB,GAEP,OADA9iB,KAAK4I,MAAM0P,OAAOwK,EAAUlX,MACrB5L,O,0BAGP7D,GACA,OAAI6D,KAAK4I,MAAMyK,IAAIlX,GACR6D,KAAK4I,MAAMnM,IAAIN,GAEnB,S,KAeA4mB,GAVS,WACpB,IAAIna,EAAQ,KAMZ,OAAOA,IAHHA,EAAQ,IAAIga,IAJI,G,ioBCpCjB,SAASvR,GAAiBxI,GAC7B,IAAMma,EAAO,GAEb,IAAK,IAAM7lB,KAAO0L,EACdma,EAAK7lB,GAAO,IAAI6O,EAAMnD,EAAO1L,GAAK+O,eAAgBrD,EAAO1L,GAAK8O,SAAU9O,GAE5E,OAAO6lB,EAGJ,IAAMC,GAAe,SAAC,EAA6BC,EAAmBC,GAAmB,cAAlExW,EAAkE,KAAtDkJ,EAAsD,KACxFuN,EAASvN,EAAclU,OAASkU,EAAchJ,MAAM,KAAO,GAC3DwW,EAAkBH,EAAkB7X,YACpCiY,EAAYF,EAAOjb,KAAI,SAAAob,GAAA,ObGxB,SAAoCrS,EAAcvE,GAAY,IACzDhB,EAAWuF,EAAXvF,OAER,OAAIqS,GAAc3K,IAAI1H,EAAOoP,SAClBiD,GAAcvhB,IAAIkP,EAAOoP,SACfuD,QACApN,aAAaA,GACbvE,WAAWA,GACXyO,QAEd4C,GACUvhB,IAAIkP,EAAOC,OAASlN,EAAUC,QAAUH,EAAeC,WAAaL,EAAiBC,aACrFigB,QACApN,aAAaA,GACbvE,WAAWA,GACXyO,QalBkBoI,CAA2BH,EAAgBE,GAAMrS,aAAcvE,MAClG,OAAO1B,EAAWC,gBAAgBoY,EAAWH,IAGpCM,GAA2B,SAACC,EAAOC,GAAuC,IACzC,EADaxW,EAA4B,uDAAnB,GAAIyW,EAAe,aAC/ED,IAAcnV,EAAeI,SAC7B8U,EAAMG,YAAYliB,OAAS,GAC3B,EAAA+hB,EAAMG,aAAYzd,KAAlB,WAA0Bwd,KAE1BF,EAAMG,YAAYzd,KAAK,CACnB0d,GAAIH,EACJI,KAAM5W,EACN6W,SAAUJ,KAITK,GAA4B,SAACC,EAAUC,GAAU,OAC1D,EAAAA,EAAMC,qBAAoBhe,KAA1B,WAAkC8d,EAASE,qBAA3C,UAAmEF,EAASL,gBAGnEQ,GAAqB,SAACH,EAAUR,EAAOC,GAAuC,IAA5BxW,EAA4B,uDAAnB,GAAIyW,EAAe,aACvFH,GAAyBC,EAAOC,EAAWxW,EAAQyW,GACnDK,GAA0BC,EAAUR,IAGlCY,aACDzlB,EAAcC,OAAS,CACpBylB,UAAW,CAAC,cACZC,SAAU,EAAC,GAAM,KAHnB,MAKD3lB,EAAcE,QAAU,CACrBwlB,UAAW,CAAC,oBACZC,SAAU,EAAC,GAAO,KAPpB,MASD3lB,EAAcG,IAAM,CACjBulB,UAAW,CAAC,aAAc,oBAC1BC,SAAU,EAAC,GAAM,KAXnB,IAeAC,GAAqB,SAAC9X,EAAY/Q,EAAG8oB,GACvC,IAA2B,IAAvBA,GAA4B9oB,IAAO8oB,EAAoB,EAAI,CAC3D,IAAMC,EAAKhY,EAAWhL,OAAS,EAE/BgL,EAAWgY,GAAShY,EAAWgY,GAAI9X,MAAM,KAAK,GAA9C,IAAoDjR,OAEpD+Q,EAAWvG,KAAX,GAAmBxK,IAIdgpB,GAA2B,SAACjY,EAAYkY,EAAS9nB,GAC1D,IAEM+nB,EAAgB,GAChBC,EAAgB,GAJ6C,KAM9BT,GAAcvnB,GAAMynB,SANU,GAM5DQ,EAN4D,KAM9CC,EAN8C,KAanE,OALAvY,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMspB,EAAgBL,EAAQjpB,GAC9BspB,GAAiBF,GAAgBP,GAAmBK,EAAelpB,GAT5C,IAUtBspB,GAAiBD,GAAgBR,GAAmBM,EAAenpB,GAT7C,MAWpB,CACH+Q,WAAYmY,EAAc7hB,KAAK,KAC/BkiB,iBAAkBJ,EAAc9hB,KAAK,OAKhCmiB,GAA0B,SAACzY,EAAYkY,EAAS9nB,EAAM2c,EAAcF,GAC7E,IAAIkL,EAAoB,GAClBW,EAAkB,GAClBC,EAAe,GAyBrB,OAvBA5Y,EAAmBC,GAAY,SAAC/Q,GAC5B,GAAIipB,EAAQjpB,GAAI,CACZ,IAAIke,EAAO,GAEPyL,EAAe,CAAEvf,KAAM,IAE3B0T,EAAa5Q,SAAQ,SAACiR,GAClB,IAAMhP,EAAOyO,EAAcO,GAAG7I,aAAanG,KAAKnP,GAChDke,EAAUA,EAAV,IAAkB/O,EAClBwa,EAAavf,KAAK+T,GAAKhP,UAGG7K,IAA1BmlB,EAAgBvL,KAChBuL,EAAgBvL,GAAQ,GACxB4K,EAAkB5K,IAAS,EAC3BwL,EAAaxL,GAAQyL,GAGzBd,GAAmBY,EAAgBvL,GAAOle,EAAG8oB,EAAkB5K,IAC/D4K,EAAkB5K,GAAQle,MAI3B,CACHypB,kBACAC,iBAKKE,GAAe,SAACC,EAAUC,EAAUvY,EAAQ+W,EAAUyB,GAC/D,IAAI3L,EAAc,GACdC,EAAgB,kBAAMiK,EAAS3S,gBAC3BxU,EAASoQ,EAATpQ,KACF4P,EAAa8Y,EAAS7U,YACtBgV,EAAqBH,
EAASI,mBAAmBC,oBASvD,OAAOH,EAAShZ,GAPS,SAAAzK,GAAA,OAASwjB,EAC9BE,EAAmB1jB,GACnBA,EACA+X,EACAD,KAG0Cjd,IAGrCgpB,GAAqB,SAACrC,GAC/B,IAAM+B,EAAW/B,EAAMsC,OAAM,GACvB9C,EAAoBQ,EAAMuC,uBAShC,OARAR,EAAS9O,eAAiBuM,EAAkBra,OAAOV,KAAI,SAAAmM,GAAA,OAAKA,EAAEnY,UAAQ8G,KAAK,KAG3EigB,EAAkB5X,iBAAmB,KACrC4X,EAAkBnX,iBAAmB,KACrCmX,EAAkBxX,eAAiB,KACnC+Z,EAASvL,wBAAwBgM,wBAE1BT,GAGLU,GAAS,SAACpU,EAAKhH,EAAMqb,EAAIC,GAG3B,IAFA,IAAIlpB,EAAMipB,EAAGrU,EAAKhH,EAAM,EAAGsb,GAElBzqB,EAAI,EAAG+M,EAAMoJ,EAAIpQ,OAAQ/F,EAAI+M,EAAK/M,IACvCuB,EAASA,EAAT,IAAgBipB,EAAGrU,EAAKhH,EAAMnP,EAAGyqB,GAErC,OAAOlpB,GAGLmpB,GAAQ,SAACvU,EAAKlJ,EAAQiL,EAAKuS,GAC7B,IAAM9a,EAAQwG,EAAI+B,GAElB,OADYvI,IAAUgD,EAAS8X,EAAQxd,EAAO0C,GAAO+O,eAInDiM,GAAgB,SAACplB,EAAK+a,GAExB,OADkBA,EAAO,aAAclT,MAAQkT,EAAS,CAACA,IACxCsK,MAAK,SAAAC,GAAA,OAAOtlB,GAAOslB,EAAI,IAAMtlB,GAAOslB,EAAI,OAGvDC,aACDloB,EAAeC,WAAa8nB,IAD3B,MAEDnoB,EAAiBE,SAAWioB,IAF3B,IAKAI,GAAiB,SAAC9pB,EAAOqf,EAAQ0K,GAAhB,OAA8BF,GAAcE,GAAW/pB,EAAOqf,IAExE2K,GAAyB,SAACnD,EAAOoD,GAA4B,IAAhB3Z,EAAgB,uDAAP,GAC3D4Z,EAAM,GACJpD,EAAYxW,EAAOwW,WAAarU,EAFgC,EAGAnC,EAA9D6Z,mBAH8D,WAGA7Z,EAA1C8Z,uBAH0C,WAGA9Z,EAAjB6Y,aAHiB,SAIhEkB,EAAclB,EAAQD,GAAmBrC,GAASA,EAClDyD,EAAoBD,EAAYE,kBAKlCL,EAHCD,EAAWnlB,OAGNmlB,EAAW3e,KAAI,SAAAkf,GAAA,OAAc,YAAuB,QAApBrD,gBAAoB,MAAT,GAAS,IACZA,EAAlCsD,mBAD8C,MAChC,CAAC,GAAI,IAD2B,EACtB9lB,EAAUwiB,EAAVxiB,MADsB,KAEjB8lB,EAFiB,UAEjDC,OAFiD,MAEpC,GAFoC,SAEhCC,OAFgC,MAEvB,GAFuB,EAGhDC,EAAUF,EAAW3T,QAAO,SAACzL,EAAKhM,EAAMP,GAE1C,OADAuM,EAAIhM,GAAQP,EACLuM,IACR,IAGGuf,GAFNH,EAAaA,EAAWxS,QAAO,SAAAxJ,GAAA,OAAUA,KAAS4b,GAC9CA,EAAkB5b,GAAOoc,IAAI/b,OAASlN,EAAUE,WAAc2M,IAAUgD,MACpD5M,OAClBimB,EAAY,GAElB,GAAIF,EACA,IADM,eACG9rB,EAAO+M,GACZ,IAAM4M,EAAM+R,EAAY1rB,GAClBuB,EAAMA,GAAGoqB,EAAWpf,KAAI,SAACoD,GAC3B,IAAMuI,EAAM2T,EAAQlc,GACpB,OAAOgK,EAAIzB,MAEf8T,EAAUzqB,GAAO,GANZvB,EAAI,EAAG+M,EAAM2e,EAAY3lB,OAAQ/F,EAAI+M,EAAK/M,IAAK,EAA/CA,GASb,IAAIisB,EAAYvrB,OAAO0J,KAAKxE,GAAS,IAAIuT,QAAO,SAAAxJ,GAAA,OAASA,KAAS4b,KAC5DW,EAAUN,EAAO7lB,QAAUkmB,EAAUlmB,OAU3C,OARKslB,IACDY,EAAYA,EAAU9S,QAAO,SAAAxJ,GAAA,OAAS4b,EAAkB5b,GAAOoc,IAAI/b,OAASlN,EAAUC,YAGrFqoB,IACDa,EAAYA,EAAU9S,QAAO,SAAAxJ,GAAA,OAAS4b,EAAkB5b,GAAOoc,IAAI/b,OAASlN,EAAUE,cAGnFkpB,EAAU,SAACjf,EAAQjN,GACtB,IAAImsB,GAAU,EAKd,OAJIf,IACAe,GAAUL,GAAOE,EAAUzB,GAAOoB,EAAY1e,EAAQyd,GAAO1qB,KAG1DisB,EAAUG,OAAM,SAACzc,GACpB,IAAMpK,EAAM0H,EAAO0C,GAAO+O,cAC1B,OAAOqM,GAAexlB,EAAKK,EAAM+J,GAAQ4b,EAAkB5b,GAAOoc,IAAI5M,aACpEgN,GACN,kBAAM,GA3CqB,CA4ChCV,MA9CG,CAAC,kBAAM,IA4DjB,OAVI1D,IAAcrU,EACE4X,EAAYe,QAAO,SAACpf,EAAQjN,GAAT,OAAemrB,EAAIiB,OAAM,SAAA5B,GAAA,OAAMA,EAAGvd,EAAQjN,QAAK,CAC9EssB,WAAW,IAGChB,EAAYe,QAAO,SAACpf,EAAQjN,GAAT,OAAemrB,EAAIP,MAAK,SAAAJ,GAAA,OAAMA,EAAGvd,EAAQjN,QAAK,CAC7EssB,WAAW,KA+CVC,GAAuB,SAAC1C,EAAU9Y,EAAYuX,EAAUkE,EAAc1C,GAC/ED,EAAS7U,YAAcjE,EACvB8Y,EAASvL,wBAAwBgM,wBACjC7B,GACIH,EACAuB,EACAjX,EAAeC,OACd,CAAEtB,OAAQib,GACT1C,IA+BG2C,GAAmB,SAACnE,EAAUoE,EAAWnb,EAAQob,GAC1D,IAAMC,EAAStE,EAAS8B,MAAM7Y,EAAO+a,WACjCO,EAAgBH,EAiBpB,OAhBInb,EAAOpQ,OAAS8B,EAAcE,UAC9B0pB,EAAgBF,EAAUxT,QAAO,SAAAd,GAAA,OAA+C,IAAlCqU,EAAUniB,QAAQ8N,OAIpEuU,EAAO7R,eAAiB8R,EAAcxlB,KAAK,KAC3CulB,EAAOtO,wBAAwBgM,wBAE/B7B,GACIH,EACAsE,EACAha,EAAeE,QACf,CAAE4Z,YAAWnb,SAAQub,gBAAiBD,GACtC,MAGGD,GAIEG,GAAmB,SAACzE,EAAU0E,EAAczb,EAAQob,GAAjC,OAC5BK,EAAazgB,KAAI,SAAA0gB,GAAA,OACbR,GAAiBnE,EAAU2E,EAAY1b,EAAQob,OAE1CO,GAAqB,SAACtH,GAO/B,IALAA,EAAa/X,EAAQ,GAAI+X,IACT5V,OACZ4V,EAAW5V,KAAOlN,EAAUE,YAG3B4iB,EAAWzG,QACZ,OAAQyG,EAAW5V,MACnB,KAAKlN,EAAUC,QACX6iB,EAAWzG,QAAUvc,EAAeC,WACpC,MACJ,QACA,KAAKC,EAAUE,UACX4iB,EAAWzG,QAAU3c,EAAiBC,YAK9C,OAAOmjB,GAcEuH,GAA4B,SAAAp
d,GAAA,OAAUA,EAAOxD,KAAI,SAACqZ,GAG3D,OAd8B,SAACA,GAAe,IACtC5V,EAAwB4V,EAAxB5V,KAAMmP,EAAkByG,EAAlBzG,QAAS5e,EAASqlB,EAATrlB,KACvB,GAAIyP,IAASlN,EAAUE,WAAagN,IAASlN,EAAUC,QAKnD,MAAM,IAAI+R,MAAJ,wCAAkD9E,EAAlD,aAAmEzP,EAAnE,UAJN,IAAK6hB,GAAc3K,IAAI0H,GACnB,MAAM,IAAIrK,MAAJ,mDAA6DqK,EAA7D,aAAiF5e,EAAjF,UASd6sB,CADAxH,EAAasH,GAAmBtH,IAEzBA,MAeEyH,GAAa,SAACC,EAAUne,EAAMY,EAAQ9E,GAC/C8E,EAASod,GAA0Bpd,GACnC9E,EAAUvK,OAAO8Q,OAAO9Q,OAAO8Q,OAAO,GAAI+b,IAAgBtiB,GAC1D,IAAMic,EAAYC,GAAetmB,IAAIoK,EAAQoW,YAG7C,IAAK6F,EACD,MAAM,IAAIpS,MAAJ,mCAA6C7J,EAAQoW,WAArD,WAPiD,MAU3B6F,EAAU7B,QAAQlW,EAAMY,EAAQ9E,GAVL,UAUpDwX,EAVoD,KAU5ClN,EAV4C,MAZ/B,SAACxF,EAAQyd,GACrCzd,EAAO7C,SAAQ,SAAC0Y,GACZ,IAAM6H,EAAc7H,EAAW8H,GAC/B,GAAKD,EAAL,CAEA,IAAMvV,EAAMsV,EAAWjjB,QAAQqb,EAAWrlB,MAC1CitB,EAAWtV,GAAOuV,EAClB7H,EAAWrlB,KAAOktB,SACX7H,EAAW8H,OAetBC,CAAiB5d,EAAQ0S,GACzB,IAAMlT,EAAW8S,GAAa9M,EAAexF,EAAQ0S,GAG/CmL,EAAYve,EAAWC,gBAAgBC,EAAUtE,EAAQ1K,MAC/D+sB,EAASrD,mBAAqB2D,EAG9BN,EAAStY,YAAcO,EAAcxP,QAAUwP,EAAc,GAAGxP,OAAzC,MAAuDwP,EAAc,GAAGxP,OAAS,GAAM,GAG9G,IAAM8nB,EAAe,GACb5gB,EAAW2gB,EAAX3gB,OACF6gB,EAAgB7gB,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAMR,UAC1C4e,EAAsB9gB,EAAOV,KAAI,SAAAoD,GAAA,OAASA,EAAM4F,mBAQtD,OAPAzE,EAAmBwc,EAAStY,aAAa,SAAChV,GACtC6tB,EAAa7tB,GAvbrB,SAA+BiN,EAAQsI,EAAeyY,EAAShuB,GAC3D,IAAMonB,EAAO,GADiD,uBAG9D,YAA2Bna,EAAOsP,UAAlC,+CAA6C,yBAAjChb,EAAiC,KAA5BoO,EAA4B,KACzCyX,EAAKzX,EAAMpP,QAAU,IAAI6P,EAAMmF,EAAchU,GAAKvB,GAAIguB,EAAQzsB,GAAKvB,GAAI2P,IAJb,6EAM9D,OAAOyX,EAibe6G,CAAqBhhB,EAAQ8gB,EAAqBD,EAAe9tB,MAEvF4tB,EAAU1D,oBAAsB2D,EAEhCP,EAASvS,eAAkBhL,EAAOxD,KAAI,SAAA4R,GAAA,OAAKA,EAAE5d,QAAO8G,OACpDimB,EAASY,YAAcjjB,EAAQoW,aAAelf,EAAWI,KAAO2M,EAAiBC,GAAQlE,EAAQoW,WAC1FiM,GAGE/U,GAAgB,SAACxI,EAAQJ,GAGlC,IAFA,IAAI3P,EAAI,EAEDA,EAAI+P,EAAOhK,SAAU/F,EACxB,GAAI2P,IAAUI,EAAO/P,GAAGO,KACpB,MAAO,CACHA,KAAMoP,EACNK,KAAMD,EAAO/P,GAAGmf,SAAWpP,EAAO/P,GAAGgQ,KACrC1J,MAAOtG,GAInB,OAAO,MA+BLmuB,GAAgC,SAAC1C,EAAW3O,GAC9C,IAAMsR,EAActR,EAAUuR,iBAC1BC,EAAiB7C,EAerB,OAbA2C,EAAYlhB,SAAQ,SAACqhB,GACjB,GAAKA,EAAL,CADgC,IAMjB,EANiB,EAhCF,SAACA,GACnC,IAAIC,EAAS,GACTzG,SAEJ,OADAA,EAAYwG,EAAWrG,IAEvB,KAAKtV,EAAeC,OAChB2b,EAAS,CAACD,EAAWnG,UACrB,MACJ,KAAKxV,EAAeE,QAChB0b,EAAS,CAACD,EAAWpG,KAAK2E,iBAC1B,MACJ,KAAKla,EAAeO,KAChBqb,EAAS,CAACD,EAAWnG,UACrB,MACJ,KAAKxV,EAAeG,QAChBgV,EAAY,UACZyG,EAAS,CAACD,EAAWpG,KAAKsG,cAAcxd,MAAM,KAAMsd,EAAWnG,UAC/D,MACJ,QACIL,EAAY,KAGhB,MAAO,CACHA,YACAyG,UAa8BE,CAAuBH,GAA7CxG,EALwB,EAKxBA,UAAWyG,EALa,EAKbA,OACnB,GAAIzG,EACAuG,GAAiB,EAAAA,GAAevG,GAAf,WAA6ByG,GAA7B,QAAqC,CAClDlC,WAAW,UAKhBgC,GAmCEK,GAAsB,SAAC7G,GAChC,KAAOA,EAAM8G,SAAW9G,EAAMG,YAAY4G,MAAK,SAAAvuB,GAAA,OAAKA,EAAE4nB,KAAOtV,EAAeG,YACxE+U,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEgH,GAAmB,SAAChH,GAC7B,KAAOA,EAAM8G,SACT9G,EAAQA,EAAM8G,QAElB,OAAO9G,GAGEiH,GAAqB,SAACjH,GAC/B,IADoD,IAAdkH,EAAc,uDAAP,GACtClH,EAAM8G,SACTI,EAAKxkB,KAAKsd,GACVA,EAAQA,EAAM8G,QAElB,OAAOI,GAGEC,GAA2B,SAACvD,EAAawD,EAAYC,EAAgB5d,GAC9E,IAAI6W,SACIgH,EAA4CD,EAA5CC,qBAAsBC,EAAsBF,EAAtBE,kBACxBC,EAAsBH,EAAeI,SAMrCC,EAAkB,SAAC,GAA4B,IAAlBC,EAAkB,EAA1Ble,OAAcuW,EAAY,EAAZA,MACnB4H,EAASD,EAAnBrH,SACJuH,SAKJ,OAHa,OAATD,GAAiBA,EAAKziB,OAAO2d,MAAK,SAAAtqB,GAAA,OAAKA,EAAE0P,OAASlN,EAAUC,aAC5D4sB,EAAehB,GAAoB7G,IAEhCpnB,OAAO8Q,OAAO,GAAIie,EAAM,CAC3BE,kBAIJC,EAAY,GAEhB,GAAoB,OAAhBlE,EACAkE,EAAY,CAAC,CACTxH,SAAU,KAEdA,EAAW,OACR,OACCyH,EAAkBnvB,OAAOkrB,OAAOwD,EAAqBU,iBAC/B,IAAtBT,IACAQ,EAAkBA,EAAgB1W,QAAO,SAAA7Y,GAAA,OAAKA,EAAEiR,OAAOge,WAAaD,MAGxE,IAAMS,EAAmBF,EAAgB1W,QA9B5B,SAAC6W,GAEd,OADeze,EAAO6C,UAAa,kBAAM,IAC3B4b,EAAOze,MA8Bf0e,EAAgB,GAEtB,IAA0B,IAAtBZ,EAA6B,CAC7B,IAAMa,EAAwBxvB,OAAOkrB,OAAOwD
,EAAqBU,gBAEjEI,EAAsBhjB,SAAQ,SAACijB,GAC3B,IAAMC,EAAaD,EAAU5e,QACI,IAA7B6e,EAAWC,eAA2BD,EAAWE,SAAW/e,EAAO+e,QAC/DF,EAAWb,WAAaD,IAC5BW,EAAczlB,KAAK2lB,EAAUrI,QAC7BM,EAAW8H,EAAsB/W,QAAO,SAAA7Y,GAAA,OAAKA,IAAM6vB,KAAW5jB,IAAIijB,IACzDzpB,QAAU6pB,EAAUplB,KAAK,CAC9B4d,WACAmI,OAAQJ,EAAUrI,MAClBkH,KAAMD,GAAmBoB,EAAUrI,aAOnDM,GAAW,MAAG7C,OAAH,qBAAiBwK,EAAiBxjB,IAAIijB,IAAtC,CAAwD,CAC/DpH,SAAUsD,EACViE,aAA8B,OAAhBjE,GAAwBA,EAAYze,OAAO2d,MAAK,SAAAtqB,GAAA,OAAKA,EAAE0P,OAASlN,EAAUC,WACpF4rB,GAAoBQ,EAAeqB,mBAAqB,SAC5DrX,QAAO,SAAA7Y,GAAA,OAAW,OAANA,KAChBsvB,EAAUplB,KAAK,CACX4d,WACA6H,wBAAmBA,EAAnB,GAAqC1e,EAAO0e,eAAiB,OAIrE,IAAMQ,EAAYvB,EAAWpH,MAEvB4I,EAAahwB,OAAO8Q,OAAO,CAC7Bmf,kBAAmBjF,EACnB4D,uBACD/d,GAEHqe,EAAU1iB,SAAQ,SAAC0jB,GAAQ,IACLlB,EAASkB,EAAnBxI,SACFyI,EAAmB5F,GAAuBwF,EAAWf,EAAM,CAC7DrE,kBAAmBqE,EAAKb,MAAK,SAAAvuB,GAAA,OAAKA,EAAEqvB,eAAiBc,OAEnDzB,EAAO4B,EAAI5B,KAEjB,GAAIA,EAAM,CACN,IAAM8B,EAzIO,SAACrF,EAAWuD,GACjC,IAAK,IAAIhvB,EAAI,EAAG+M,EAAMiiB,EAAKjpB,OAAQ/F,EAAI+M,EAAK/M,IAAK,CAC7C,IAAM8nB,EAAQkH,EAAKhvB,GACnByrB,EAAY0C,GAA8B1C,EAAW3D,GAEzD,OAAO2D,EAoIuBsF,CAAiBF,EAAkB7B,EAAKgC,WAC9DJ,EAAIL,OAAOU,kBAAkBH,EAAeJ,QAlI3B,SAAvBQ,EAAwBpU,EAAW2O,GAA8C,IAAnCla,EAAmC,uDAA1B,GAAI4f,EAAsB,uDAAP,GACtElB,EAAgBkB,EAAalB,eAAiB,GAC9CL,EAAYuB,EAAa/I,SAEzBgJ,GAAYnB,EAAclqB,SAA+C,IAAtCkqB,EAAc1lB,QAAQuS,GAE/DsU,GAAatU,EAAUmU,kBAAkBxF,EAAWla,GAEpD,IAAM8f,EAAWvU,EAAUwU,UAC3BD,EAASnkB,SAAQ,SAACqkB,GACd,IAAMC,EAAmB5B,EAAUzW,QAAO,SAAA5T,GAAA,OAAOA,EAAIoqB,eAAiB4B,KAClEjD,EAAiBH,GAA8B1C,EAAW8F,GAE1DC,EAAiBzrB,SACjBuoB,EAAiBrD,GAAuBqD,EAAgBkD,EAAkB,CACtEpG,aAAa,EACbC,iBAAiB,EACjBjB,OAAO,KAGf8G,EAAqBK,EAAOjD,EAAgB/c,EAAQ4f,MAgHhDD,CAAqBT,EAAWI,EAAkBH,EAAY,CAC1DT,cAAeW,EAAIX,cACnB7H,SAAUsH,QAMb+B,GAA4B,SAACrC,EAAsBqB,EAAWtB,GACvE,IAAMuC,EAAmBtC,EAAqBsC,iBAE9C,IAAK,IAAMpB,KAAUoB,EAAkB,CACnC,IAAMvB,EAAYuB,EAAiBpB,GAC7BF,EAAaD,EAAU5e,OACvB+d,EAAsBH,EAAe5d,OAAOge,SAC5CoC,GAAwBxC,EAAeuB,WAAWiB,uBACpDxC,EAAeuB,WAAWiB,sBAAsBvB,EAAYjB,EAAe5d,QAC/E,GAAI6e,EAAWb,WAAaD,GAAuBqC,EAAuB,CACtE,IAAMC,EAAgBxB,EAAWhI,SACjC6G,GAAyB2C,EAAe,CACpC9J,MAAO2I,EACPoB,aAAclD,GAAoBwB,EAAUrI,QAC7C,CACCsH,uBACAC,mBAAmB,EACnBE,SAAUD,EACVkB,kBAAmBL,EAAUrI,OAC9BsI,MAKF0B,GAAqB,SAAC1C,GAA6C,IAAvB7d,EAAuB,uDAAd,GAAIuW,EAAU,aACxEiK,SACEC,EAAkBzgB,EAAOygB,gBACzB5J,EAAW7W,EAAO6W,SAClB7mB,EAASgQ,EAAO+e,OAAhB,IAA0B/e,EAAOge,SAGnCwC,EADAC,EACkB5C,EAAqBU,eAErBV,EAAqBsC,iBAG1B,OAAbtJ,SACO2J,EAAgBxwB,GAEvBwwB,EAAgBxwB,GAAO,CACnBumB,QACAvW,WAQC0gB,GAAyB,SAACvF,EAAWC,EAAWuF,GACzD,IAAMC,EAAsBzF,EAAU1U,QAAO,SAACC,EAAKtI,GAM/C,MAL+B,WAA3BA,EAAMyiB,YAAY7xB,KAClB0X,EAAIzN,KAAJ,MAAAyN,EAAA,GAAY0U,EAAUxT,QAAO,SAAAd,GAAA,OAA0C,IAA7BA,EAAUga,OAAO1iB,QACpDA,KAASuiB,GAChBja,EAAIzN,KAAKmF,GAENsI,IACR,IACH,OAAO7K,MAAMC,KAAK,IAAIgT,IAAI8R,IAAsB5lB,KAAI,SAAAoD,GAAA,OAASA,EAAMwQ,WAU1D5P,GAAwB,SAACZ,EAAO1O,GACzC,OAAI0O,EAAMmQ,aACCnQ,EAAMmQ,cAANnQ,CAAqB1O,GAEzBA,G,0PC1KIqxB,G,WA/hBX,c,4FAAwB,SACpB,IAAIC,SAEJnuB,KAAKwqB,QAAU,KACfxqB,KAAK6jB,YAAc,GACnB7jB,KAAKokB,oBAAsB,GAC3BpkB,KAAKktB,UAAY,GANG,2BAAR9C,EAAQ,qBAARA,EAAQ,gBAQE,IAAlBA,EAAOzoB,SAAkBwsB,EAAS/D,EAAO,cAAe8D,GAExDluB,KAAK2W,eAAiBwX,EAAOxX,eAC7B3W,KAAK4Q,YAAcud,EAAOvd,YAC1B5Q,KAAK8pB,YAAcqE,EAAOrE,YAC1B9pB,KAAKwqB,QAAU2D,EACfnuB,KAAK6lB,mBAAqB7lB,KAAKwqB,QAAQ3E,mBACvC7lB,KAAKouB,gBAAkB9jB,IACvBtK,KAAKka,wBAAwBgM,0BAE7B+C,GAAUA,cAACjpB,MAAX,OAAoBoqB,IACpBpqB,KAAKouB,gBAAkBpuB,KAAK6lB,mBAAmB1pB,KAC/C6D,KAAKka,wBAAwBgM,wBAC7BlmB,KAAKquB,sBAAwB,CACzB3C,eAAgB,GAChB4B,iBAAkB,K,+CA0B1B,OAAOttB,KAAKqQ,gBAAgBxH,OAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEyP,c,gCAY9C,OAAO3L,KAAKouB,kB,sCAIZ,OAAOpuB,KAAKsuB,c,8CAMZ,OAFAtuB,KAAKsuB,YAAcrL,GA
Aa,CAACjjB,KAAK4Q,YAAa5Q,KAAK2W,gBACnD3W,KAAKimB,uBAAwBjmB,KAAKouB,iBAChCpuB,O,6CAIP,OAAOA,KAAK6lB,qB,2BAiCV0I,EAAUve,GACZ,OAAOH,EAAa7P,KAAMuuB,EAAUve,K,kCAuB3Bue,GACT,OAAO1e,EAAa7P,KAAMuuB,EAAUpU,GAAkBna,KAAMuuB,IAAW,K,4BAqBpEC,GACH,OAAOjU,GAAMva,KAAMwuB,K,iCAoBXC,GACR,OAAOnY,GAAWtW,KAAMyuB,K,6BAkDpB/I,EAAUvY,GACd,IAAMuhB,EAAY,CACd3xB,KAAM8B,EAAcC,OACpBopB,WAAW,GAMf,OAJA/a,EAAS7Q,OAAO8Q,OAAO,GAAIshB,EAAWvhB,IAC/BpQ,KAAOoQ,EAAOpQ,MAAQ2xB,EAAU3xB,KD0DhB,SAACmnB,EAAUwB,EAAU0C,EAAcuG,GAC9D,IAAIC,EAAe,GAEb7xB,EAASqrB,EAATrrB,KAEAyrB,EAAStE,EAAS8B,MAAM2I,EAAYzG,WACpC2G,EAAmBrJ,GACrBgD,EACA9C,EACA0C,EACAlE,EACAU,IAEEL,EAAYD,GAAcvnB,GAAMwnB,UAItC,OAFA4D,GAAqBK,EAAQqG,EAAiBtK,EAAU,IAAKL,EAAUkE,EAAc1C,GAEjFnB,EAAU5iB,OAAS,GACnBitB,EAAe1K,EAAS8B,MAAM2I,EAAYzG,WAC1CC,GAAqByG,EAAcC,EAAiBtK,EAAU,IAAKL,EAAUkE,EAAc1C,GACpF,CAAC8C,EAAQoG,IAGbpG,EC9EIsG,CACH9uB,KACA0lB,EACAvY,EAJgB,CAAE+a,UAAW/a,EAAO+a,c,gCA4BxC,OAAQloB,KAAK4Q,YAAYjP,SAAW3B,KAAK2W,eAAehV,S,8BAUnC,IAAlBumB,IAAkB,yDACfzC,EAAW,IAAIzlB,KAAKguB,YAAYhuB,MAMtC,OALIkoB,EACAzC,EAASsJ,UAAU/uB,MAEnBylB,EAASsJ,UAAU,MAEhBtJ,I,8BA8CF6C,EAAWnb,GAChB,IAAMuhB,EAAY,CACd3xB,KAAM8B,EAAcC,OACpBopB,WAAW,GAEf/a,EAAS7Q,OAAO8Q,OAAO,GAAIshB,EAAWvhB,GACtC,IAAM2gB,EAAc9tB,KAAKonB,kBACnBmB,EAAYjsB,OAAO0J,KAAK8nB,GACtB/wB,EAASoQ,EAATpQ,KACFgxB,EAAsBF,GAAuBvF,EAAWC,EAAWuF,GAErEpV,SAEA3b,IAAS8B,EAAcG,IASvB0Z,EAAY,CARU2P,GAAiBroB,KAAM+tB,EAAqB,CAC9DhxB,KAAM8B,EAAcC,OACpBopB,UAAW/a,EAAO+a,WACnBK,GACkBF,GAAiBroB,KAAM+tB,EAAqB,CAC7DhxB,KAAM8B,EAAcE,QACpBmpB,UAAW/a,EAAO+a,WACnBK,IAIH7P,EADsB2P,GAAiBroB,KAAM+tB,EAAqB5gB,EAAQob,GAI9E,OAAO7P,I,wCAIP,OAAO1Y,KAAKgvB,e,8CAWZ,OAPAhvB,KAAKgvB,aAAehvB,KAAKsuB,YAAYzlB,OAAO+K,QAAO,SAACC,EAAKob,EAAUrzB,GAK/D,OAJAiY,EAAIob,EAAS9yB,QAAU,CACnB+F,MAAOtG,EACP+rB,IAAKsH,EAAStjB,UAEXkI,IACR,IACI7T,O,gCAWPA,KAAKwqB,SAAWxqB,KAAKwqB,QAAQ0E,YAAYlvB,MACzCA,KAAKwqB,QAAU,KACfxqB,KAAKktB,UAAUpkB,SAAQ,SAACqkB,GACpBA,EAAM3C,QAAU,QAEpBxqB,KAAKktB,UAAY,K,kCA6BRC,GACT,IAAIrZ,EAAM9T,KAAKktB,UAAUhY,WAAU,SAAAia,GAAA,OAAWA,IAAYhC,MACjD,IAATrZ,GAAa9T,KAAKktB,UAAU1lB,OAAOsM,EAAK,K,gCAQjCsb,GACPpvB,KAAKwqB,SAAWxqB,KAAKwqB,QAAQ0E,YAAYlvB,MACzCA,KAAKwqB,QAAU4E,EACfA,GAAUA,EAAOlC,UAAU9mB,KAAKpG,Q,kCA4BhC,OAAOA,KAAKwqB,U,oCA6BZ,OAAOxqB,KAAKktB,Y,uCA4BZ,OAAOltB,KAAK6jB,c,+CA4BZ,OAAO7jB,KAAKokB,wB,uwBC0SLxmB,G,YAxxBX,aAAsB,O,4FAAA,oCAANsJ,EAAM,qBAANA,EAAM,sB,iKAAA,2EACTA,KADS,OAGlB,EAAKmoB,eAAiB,GAHJ,E,0WAgFbxoB,GAQLA,EAAUvK,OAAO8Q,OAAO,GAPL,CACfkiB,MAAO,MACPjtB,UAAW,KACXktB,SAAS,EACTC,cAAc,EACdvd,KAAM,IAE8BpL,GACxC,IAAMgC,EAAS7I,KAAKimB,uBAAuBpd,OAErC4mB,EAAgB7Z,GAAY7Z,KAC9BiE,KACAA,KAAKimB,uBAAuBpd,OAC5B7I,KAAK4Q,YACL/J,EAAQ2oB,aAAe3mB,EAAOV,KAAI,SAAAjM,GAAA,OAAKA,EAAEC,UAAQ8G,OAASjD,KAAK2W,eAC/D9P,EAAQoL,KACR,CACI8D,WAA8B,WAAlBlP,EAAQyoB,MACpBxZ,SAAUjP,EAAQ0oB,UAI1B,IAAK1oB,EAAQxE,UACT,OAAOotB,EAxBG,IA2BNptB,EAAcwE,EAAdxE,UACA0I,EAAuB0kB,EAAvB1kB,KAAMY,EAAiB8jB,EAAjB9jB,OAAQ+J,EAAS+Z,EAAT/Z,KAChB6R,EAAa5b,EAAOxD,KAAK,SAAA9E,GAAA,OAAKA,EAAElH,QAEhCuzB,EADgBpzB,OAAO0J,KAAK3D,GACAuR,QAAO,SAACC,EAAK7F,GAC3C,IAAM8F,EAAMyT,EAAWphB,QAAQ6H,GAI/B,OAHa,IAAT8F,GACAD,EAAIzN,KAAK,CAAC0N,EAAKzR,EAAU2L,KAEtB6F,IACR,IAgCH,MA9BsB,WAAlBhN,EAAQyoB,MACRI,EAAY5mB,SAAQ,SAAC6mB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnB5kB,EAAK6kB,GAAM9mB,SAAQ,SAACqK,EAAO2c,GACvB/kB,EAAK6kB,GAAME,GAAYD,EAAM9zB,UACzBmE,EACAiT,EACAuC,EAAKoa,GACLnkB,EAAOikB,UAKnB7kB,EAAKjC,SAAQ,SAACqK,EAAO2c,GACjBJ,EAAY5mB,SAAQ,SAAC6mB,GACjB,IAAMC,EAAOD,EAAK,GACZE,EAAQF,EAAK,GAEnBxc,EAAMyc,GAAQC,EAAM9zB,UAChBmE,EACAiT,EAAMyc,GACNla,EAAKoa,GACLnkB,EAAOikB,UAMhBH,I,gCASP,IAAM9iB,EAAa3M,KAAK4Q,YAClBmf,EAAM,GAERpjB,EAAWhL,QACMg
L,EAAWE,MAAM,KAEzB/D,SAAQ,SAACwK,GAAQ,MACHA,EAAIzG,MAAM,KAAK1E,IAAIO,QADhB,UACjBsE,EADiB,KACVC,EADU,KAGtBA,OAAc/M,IAAR+M,EAAoBA,EAAMD,EAChC+iB,EAAI3pB,KAAJ,MAAA2pB,EAAA,GAAY/mB,MAAMiE,EAAMD,EAAQ,GAAGgjB,OAAO7nB,KAAI,SAAC4R,EAAGjG,GAAJ,OAAY9G,EAAQ8G,UAI1E,OAAOic,I,8BA0BFE,GAAwD,IAA7CtX,EAA6C,uDAAlC,GAAIxL,EAA8B,uDAArB,CAAE+a,WAAW,GAC/CmC,EAAgBA,GAAG4F,EAAUhtB,OAC/BmnB,EAAS,CAACpqB,KAAMiwB,EAAWtX,GACzBiB,EAAenB,gBAAW2R,GAgBhC,OAdA/F,GACIrkB,KACA4Z,EACApL,EAAeG,QACf,CAAEshB,YAAW5F,gBAAenR,eAAgBV,GAAaU,kBACzDP,GAGAxL,EAAO+a,UACPtO,EAAamV,UAAU/uB,MAEvB4Z,EAAamV,UAAU,MAGpBnV,I,2BAsDL5F,GAA+C,IAA/B7G,EAA+B,uDAAtB,CAAE+a,WAAW,GAClC0B,EAAU5pB,KAAKkwB,QAAQ,CACzBZ,MAAO,MACPrd,KAAM+B,IAEJqK,EAASuL,EAAQje,OAAOxD,KAAI,SAAAoD,GAAA,OAASA,EAAMpP,QAC3Cg0B,EAAe,CAAC9R,GAAQ8C,OAAOyI,EAAQ7e,MAEvCqlB,EAAW,IAAIpwB,KAAKguB,YAAYmC,EAAcvG,EAAQje,OAAQ,CAAEsR,WAAY,WAgBlF,OAdAoH,GACIrkB,KACAowB,EACA5hB,EAAeO,KACf5B,EACA6G,GAGA7G,EAAO+a,UACPkI,EAASrB,UAAU/uB,MAEnBowB,EAASrB,UAAU,MAGhBqB,I,gCAwBAxkB,EAAM/E,GACb+E,EAAOA,GAAQ5L,KAAK8pB,YACpBjjB,EAAUvK,OAAO8Q,OAAO,GAAI,CAAE4U,eAAgB,KAAOnb,GAErD,IAAMgC,EAAS7I,KAAKqQ,gBAAgBxH,OAC9BwnB,EAAUxnB,EAAOV,KAAI,SAAAmM,GAAA,OAAKA,EAAEnD,mBAC5Bmf,EAAYD,EAAQ,GAAG1uB,OACzB4uB,SACAC,SACAC,SAEJ,GAAI7kB,IAAS7N,EAAWC,UAEpB,IADAuyB,EAAiB,GACZC,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMjb,EAAM,GACZ,IAAKkb,EAAS,EAAGA,EAAS5nB,EAAOlH,OAAQ8uB,IACrClb,EAAI1M,EAAO4nB,GAAQt0B,QAAUk0B,EAAQI,GAAQD,GAEjDD,EAAenqB,KAAKmP,QAErB,GAAI3J,IAAS7N,EAAWE,QAAS,CAEpC,IADAsyB,EAAiB,CAAC1nB,EAAOV,KAAI,SAAAmM,GAAA,OAAKA,EAAEnY,UAAQ8G,KAAK4D,EAAQmb,iBACpDwO,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMjb,EAAM,GACZ,IAAKkb,EAAS,EAAGA,EAAS5nB,EAAOlH,OAAQ8uB,IACrClb,EAAInP,KAAKiqB,EAAQI,GAAQD,IAE7BD,EAAenqB,KAAKmP,EAAItS,KAAK4D,EAAQmb,iBAEzCuO,EAAiBA,EAAettB,KAAK,UAClC,IAAI2I,IAAS7N,EAAWG,QAU3B,MAAM,IAAIwS,MAAJ,aAAuB9E,EAAvB,qBARN,IADA2kB,EAAiB,CAAC1nB,EAAOV,KAAI,SAAAmM,GAAA,OAAKA,EAAEnY,WAC/Bq0B,EAAS,EAAGA,EAASF,EAAWE,IAAU,CAC3C,IAAMjb,EAAM,GACZ,IAAKkb,EAAS,EAAGA,EAAS5nB,EAAOlH,OAAQ8uB,IACrClb,EAAInP,KAAKiqB,EAAQI,GAAQD,IAE7BD,EAAenqB,KAAKmP,IAM5B,OAAOgb,I,+BAGDhlB,GACN,IAAM0I,EAAY1I,EAAMpP,OACxB6D,KAAK2W,gBAAL,IAA2B1C,EAC3B,IAAMiP,EAAoBljB,KAAK6lB,mBACzBD,EAAqB1C,EAAkB4C,oBACvC3U,EAAgB5F,EAAM4F,gBACtByY,EAAUre,EAAM2F,aAAanG,KAEnC,GAAKmY,EAAkB7X,YAAYE,EAAMpP,QAKlC,CACH,IAAM4M,EAAama,EAAkBra,OAAOqM,WAAU,SAAAwb,GAAA,OAAaA,EAAUv0B,SAAW8X,KACxFlL,GAAc,IAAMma,EAAkBra,OAAOE,GAAcwC,QAN3D2X,EAAkBra,OAAOzC,KAAKmF,GAC9Bqa,EAAmB9c,SAAQ,SAACV,EAAKxM,GAC7BwM,EAAImD,EAAMpP,QAAU,IAAI6P,EAAMmF,EAAcvV,GAAIguB,EAAQhuB,GAAI2P,MAapE,OALA2X,EAAkB5X,iBAAmB,KACrC4X,EAAkBnX,iBAAmB,KACrCmX,EAAkBxX,eAAiB,KAEnC1L,KAAKka,wBAAwBgM,wBACtBlmB,O,wCAuCQ2L,EAAQglB,EAAYxjB,GAAQ,WAC3CxB,EAASmd,GAAmBnd,GAC5BwB,EAAS7Q,OAAO8Q,OAAO,GAAI,CAAE8a,WAAW,EAAM0I,YAAY,GAASzjB,GAEnE,IAAM0jB,EAAe7wB,KAAKonB,kBACpB0J,EAAUH,EAAWtc,MAAM,EAAGsc,EAAWhvB,OAAS,GAClDovB,EAAaJ,EAAWA,EAAWhvB,OAAS,GAElD,GAAIkvB,EAAallB,EAAOxP,QAAUgR,EAAOyjB,WACrC,MAAM,IAAIlgB,MAAS/E,EAAOxP,KAApB,sCAGV,IAAM60B,EAAkBF,EAAQ3oB,KAAI,SAACoD,GACjC,IAAM0lB,EAAYJ,EAAatlB,GAC/B,IAAK0lB,EAED,MAAM,IAAIvgB,MAASnF,EAAb,gCAEV,OAAO0lB,EAAU/uB,SAGf8jB,EAAQhmB,KAAKgmB,MAAM7Y,EAAO+a,WAE1BgJ,EAAKlL,EAAM3V,gBAAgBxH,OAC3BsoB,EAAiBH,EAAgB7oB,KAAI,SAAA2L,GAAA,OAAOod,EAAGpd,MAEjDkG,EAAc,GACdC,EAAgB,kBAAM,EAAK1I,gBAEzB6f,EAAiB,GACvB1kB,EAAmBsZ,EAAMpV,aAAa,SAAChV,GACnC,IAAMy1B,EAAaF,EAAehpB,KAAI,SAAAoD,GAAA,OAASA,EAAM2F,aAAanG,KAAKnP,MACvEw1B,EAAex1B,GAAKm1B,kBAAcM,GAAd,QAA0Bz1B,EAAGqe,EAAeD,QAhCzB,MAkC3BiE,GAAa,CAACmT,GAAiB,CAACzlB,GAAS,CAACA,EAAOxP,OAA1DoP,EAlCoC,WA6C3C,OAVAya,EAAMsL,SAAS/lB,GAEf8Y,GACIrkB,KACAgmB,EACAxX,EAAeK,QACf
,CAAE1B,OAAQxB,EAAQ9C,OAAQioB,GAC1BC,GAGG/K,I,gCAWAsB,GAA2D,IAA9Cna,EAA8C,uDAArC,GAAIokB,EAAiC,aAAjBjF,EAAiB,uDAAJ,GACxDsB,EAAkBzgB,EAAOygB,gBACzB1C,EAAsB/d,EAAOge,SAC7BqG,EAAUrkB,EAAOqkB,QACjBnF,EAAY3B,GAAiB1qB,MAC7BgrB,EAAuBqB,EAAUgC,sBACjCoD,EAAmBlH,GAAoBvqB,MACvC8qB,EAAa,CACf2C,aAAcgE,EACd/N,MAAO2I,GAkBX,OAfAkF,GAAkB7D,GAAmB1C,EAAsB7d,EAAQnN,MACnE6qB,GAAyBvD,EAAawD,EAAY,CAAEE,uBAChDG,SAAUD,EACVkB,kBAAmBpsB,MACnB1D,OAAO8Q,OAAO,CACVokB,WACDrkB,IAEHygB,GACAP,GAA0BrC,EAAsBqB,EAAW,CACvDlf,SACAmf,eAIDtsB,O,yBAUP0xB,EAAW9kB,GACX,OAAQ8kB,GACR,I3CtlBmB,c2CulBf1xB,KAAKqvB,eAAejpB,KAAKwG,GAG7B,OAAO5M,O,kCASE0xB,GACT,OAAQA,GACR,I3CrmBmB,c2CsmBf1xB,KAAKqvB,eAAiB,GAI1B,OAAOrvB,O,wCAUQqnB,EAAWmK,GAAS,WACfxxB,KAAKqvB,eACXvmB,SAAQ,SAAAsd,GAAA,OAAMA,EAAGrqB,KAAK,EAAMsrB,EAAWmK,Q,0BA8CpDG,EAAkBxkB,GACnB,IAAM0jB,EAAe7wB,KAAKonB,kBAE1B,IAAKyJ,EAAac,GACd,MAAM,IAAIjhB,MAAJ,SAAmBihB,EAAnB,kBAGV,IAAMC,EAAezkB,EAAOhR,MAAWw1B,EAAlB,UAErB,GAAId,EAAae,GACb,MAAM,IAAIlhB,MAAJ,SAAmBkhB,EAAnB,mBAGV,IAb2B,E5CznB5B,SAAgCC,EAAcllB,EAAYQ,GAAQ,IAC/DY,EAA4CZ,EAA5CY,QAAS+jB,EAAmC3kB,EAAnC2kB,UAAWhkB,EAAwBX,EAAxBW,QAASd,EAAeG,EAAfH,MAAOC,EAAQE,EAARF,IAD2B,EAEhD4kB,EAAa3V,SAFmC,SAE9D6V,EAF8D,KAExDC,EAFwD,KAIhEjkB,IACDf,EAAmB,IAAVA,KAAiBA,GAASA,EAAQ+kB,GAASA,EAAO/kB,EAC3DC,EAAe,IAARA,KAAeA,GAAOA,EAAM+kB,GAAUA,EAAO,EAAK/kB,EAErD6kB,IACAhkB,EAAU9J,KAAKiuB,KAAKjuB,KAAKkuB,IAAIjlB,EAAMD,GAAS8kB,IAGhD/jB,EAAUF,EAAgBC,EAASd,EAAOC,IAG1Cc,EAAQ,GAAKgkB,GACbhkB,EAAQpG,QAAQoqB,GAEhBhkB,EAAQA,EAAQpM,OAAS,IAAMqwB,GAC/BjkB,EAAQ3H,KAAK4rB,EAAO,GAIxB,IADA,IAAM9jB,EAAe,GACZtS,EAAI,EAAGA,EAAImS,EAAQpM,OAAS,EAAG/F,IACpCsS,EAAa9H,KAAK,CACd4G,MAAOe,EAAQnS,GACfqR,IAAKc,EAAQnS,EAAI,KAIzB,IAAMu2B,EAAa,GAYnB,OAXAzlB,EAAmBC,GAAY,SAAC/Q,GAC5B,IAAMuX,EAAQ0e,EAAa3gB,aAAanG,KAAKnP,GAC7C,GAAIuX,aAAiBjG,EACjBilB,EAAW/rB,KAAK+M,OADpB,CAKA,IAAM3R,EAAQyM,EAAgBC,EAAciF,GAC5Cgf,EAAW/rB,KAAQ5E,EAAMwL,MAAzB,IAAkCxL,EAAMyL,SAGrC,CAAEklB,aAAY3U,KAAMzP,G4C6lBMqkB,CADRpyB,KAAKqQ,gBAAgBhF,YAAYsmB,GACW3xB,KAAK4Q,YAAazD,GAA3EglB,EAdmB,EAcnBA,WAAY3U,EAdO,EAcPA,KAEd6U,EAAWpU,GAAa,CAACkU,GAAa,CACxC,CACIh2B,KAAMy1B,EACNhmB,KAAMlN,EAAUE,UAChBmc,QAAS3c,EAAiBG,OAC1Bif,SACA,CAACoU,IAAe,GAElB5L,EAAQhmB,KAAKgmB,MAAM7Y,EAAO+a,WAWhC,OAVAlC,EAAMsL,SAASe,GAEfhO,GACIrkB,KACAgmB,EACAxX,EAAeM,IACd,CAAE6iB,mBAAkBxkB,SAAQykB,gBAC5B,MAGE5L,I,qCA8BP,OAAO,IAAIpoB,EAHEoC,KAAKsyB,UAAUv0B,EAAWC,WACxBgC,KAAKuyB,e,iCA+CZ7Y,EAAcL,EAAWlM,GACjC,IAAM0jB,EAAe7wB,KAAKonB,kBAE1B1N,EAAa5Q,SAAQ,SAACmL,GAClB,IAAK4c,EAAa5c,GACd,MAAM,IAAIvD,MAAJ,SAAmBuD,EAAnB,mCAId,IAAMya,EAAY,CACd3xB,KAAM8B,EAAcC,OACpBopB,WAAW,GAKf,OFthBuB,SAAChE,EAAUxK,GAAiD,IAAnCL,EAAmC,uDAAvB,SAAAlY,GAAA,OAAOA,GAAKgM,EAAW,aAEnF+a,EACA/a,EADA+a,UAEE1O,EAAgB0K,EAAS7T,gBAAgBhF,YAJwC,EASnFma,GACAtB,EAAS8B,MAAMkC,GACf7O,EACAlM,EACA+W,GACA,sCAAIkG,EAAJ,qBAAIA,EAAJ,uBAAehF,GAAuBA,aAAIgF,EAA3B,QAAmC1Q,EAAcF,QAPhE6L,EAPmF,EAOnFA,gBACAC,EARmF,EAQnFA,aASEkN,EAAY,GAoBlB,OAnBAl2B,OAAO0J,KAAKqf,GAAiBpT,OAAOnJ,SAAQ,SAACzF,GACzC,GAAIgiB,EAAgBhiB,GAAI,CACpB,IAAMmlB,EAAStE,EAAS8B,MAAMkC,GACxBiC,EAAa7E,EAAajiB,GAChCmlB,EAAO5X,YAAcyU,EAAgBhiB,GAAGJ,KAAK,KAC7CulB,EAAOtO,wBAAwBgM,wBAI3BgC,GACA7D,GAAmBH,EAAUsE,EAAQha,EAAeC,OAAQtB,GAHtC,SAAAtE,GAAA,OAAU6Q,EAAasO,OAAM,SAAAjO,GAAA,OAAKlR,EAAOkR,GAAGO,gBAAkB6P,EAAWnkB,KAAK+T,SAKxGyO,EAAO3E,YAAY2E,EAAO3E,YAAYliB,OAAS,GAAGoiB,KAAOuB,EAAajiB,GAEtEmvB,EAAUpsB,KAAKoiB,OAKhBgK,EEifIC,CAAgBzyB,KAAM0Z,EAAcL,EAF3ClM,EAAS7Q,OAAO8Q,OAAO,GAAIshB,EAAWvhB,M,sCAyCmB,IAA9CulB,EAA8C,uDAA/B,GAAIC,EAA2B,uDAAZ,GAAIxlB,EAAQ,aACnDuhB,EAAY,CACd3xB,KAAM8B,EAAcC,OACpBopB,WAAW,GAET4F,EAAc9tB,KAAKonB,kBACnBmB,EAAYjsB,OAAO0J,KAAK8nB,GACxB8E,EAA0
B,CAAC,CAACD,IAalC,OAXAxlB,EAAS7Q,OAAO8Q,OAAO,GAAIshB,EAAWvhB,IACtCulB,EAAeA,EAAa/wB,OAAS+wB,EAAe,CAAC,KAGxC5pB,SAAQ,SAAC+pB,EAAUj3B,GAC5Bg3B,EAAwBh3B,GAAKiyB,GAAuBA,GAADA,UAC3CgF,GADqB,GACRF,IACjBpK,EACAuF,MAGDnF,GAAiB3oB,KAAM4yB,EAAyBzlB,EAAQob,M,kDAtuBhCpb,GAC/B,OAAOD,EAAkBI,iBAAiBH,K,+BA7B1C,OAAOqL,K,iCAOP,OAAOuK,K,iCAOP,OAAO/E,O,GAnESkQ,ICxCThX,GAAoDM,GAApDN,IAAKG,GAA+CG,GAA/CH,IAAKK,GAA0CF,GAA1CE,IAAKC,GAAqCH,GAArCG,IAAKmb,GAAgCtb,GAAhCsb,MAAOC,GAAyBvb,GAAzBub,KAAMC,GAAmBxb,GAAnBwb,MAAYC,GAAOzb,GAAZ0b,ICyBjDC,GAAY,CACdC,QC2LmB,sCAAIC,EAAJ,qBAAIA,EAAJ,uBACnB,SAACxc,GAAqC,IAAjC1J,EAAiC,uDAAxB,CAAE+a,WAAW,GACnBoL,EAAYzc,EACZ0c,SACEvJ,EAAc,GA8BpB,OA5BAqJ,EAAWvqB,SAAQ,SAAC6a,GAChB2P,EAAY3P,EAAU2P,GACtBtJ,EAAY5jB,KAAZ,MAAA4jB,EAAA,EAAoBsJ,EAAUzP,cACzB0P,IACDA,EAAaD,MAIjBC,GAAcA,IAAeD,GAC7BC,EAAWC,UAIfF,EAAUlP,oBAAsB,GAChCC,GACIxN,EACAyc,EACA9kB,EAAeI,QACf,KACAob,GAGA7c,EAAO+a,UACPoL,EAAUvE,UAAUlY,GAEpByc,EAAUvE,UAAU,MAGjBuE,ID5NXG,ICyHe,sCAAIvsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAG4c,IAAH,MAAA5c,EAAU3P,KDxH5C+gB,OC6BkB,sCAAI/gB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAGoR,OAAH,MAAApR,EAAa3P,KD5BlDwsB,QC4DmB,sCAAIxsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAG6c,QAAH,MAAA7c,EAAc3P,KD3DpDuR,QCmJmB,sCAAIvR,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAG4B,QAAH,MAAA5B,EAAc3P,KDlJpDysB,kBE1B6B,sCAAIzsB,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAG8c,kBAAH,MAAA9c,EAAwB3P,KF2BxE+K,KElBgB,sCAAI/K,EAAJ,qBAAIA,EAAJ,uBAAa,SAAA2P,GAAA,OAAMA,EAAG5E,KAAH,MAAA4E,EAAW3P,KFmB9C2I,eACAyG,cACAsd,YGlCG,SAAsBnZ,EAAYC,GACrC,OAAO7K,EAAa4K,EAAYC,EAAYP,GAAkBM,EAAYC,IAAa,IHkCvFF,iBACAG,kBACAkZ,cjC3BG,SAAwBpZ,EAAYC,EAAY1K,GACnD,OAAOuK,GAAMC,GAAcC,EAAYC,EAAY1K,GAAW2K,GAAeF,EAAYC,EAAY1K,KiC2BrGuK,SACA7N,sBAGEonB,G,KAAcA,QACpBx3B,OAAO8Q,OAAOxP,GAAW,CACrBu1B,aACAY,QACAvlB,iBACA1O,oBACA/B,aACAc,gBACAqO,oBACA4mB,WACAtV,iBACAwV,iBACDC,GAEYr2B","file":"datamodel.js","sourcesContent":["(function webpackUniversalModuleDefinition(root, factory) {\n\tif(typeof exports === 'object' && typeof module === 'object')\n\t\tmodule.exports = factory();\n\telse if(typeof define === 'function' && define.amd)\n\t\tdefine(\"DataModel\", [], factory);\n\telse if(typeof exports === 'object')\n\t\texports[\"DataModel\"] = factory();\n\telse\n\t\troot[\"DataModel\"] = factory();\n})(window, function() {\nreturn "," \t// The module cache\n \tvar installedModules = {};\n\n \t// The require function\n \tfunction __webpack_require__(moduleId) {\n\n \t\t// Check if module is in cache\n \t\tif(installedModules[moduleId]) {\n \t\t\treturn installedModules[moduleId].exports;\n \t\t}\n \t\t// Create a new module (and put it into the cache)\n \t\tvar module = installedModules[moduleId] = {\n \t\t\ti: moduleId,\n \t\t\tl: false,\n \t\t\texports: {}\n \t\t};\n\n \t\t// Execute the module function\n \t\tmodules[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\n \t\t// Flag the module as loaded\n \t\tmodule.l = true;\n\n \t\t// Return the exports of the module\n \t\treturn module.exports;\n \t}\n\n\n \t// expose the modules object (__webpack_modules__)\n \t__webpack_require__.m = modules;\n\n \t// expose the module cache\n \t__webpack_require__.c = installedModules;\n\n \t// define getter function for harmony exports\n \t__webpack_require__.d = function(exports, name, getter) {\n \t\tif(!__webpack_require__.o(exports, name)) {\n \t\t\tObject.defineProperty(exports, name, { enumerable: true, get: getter });\n \t\t}\n \t};\n\n \t// define __esModule on exports\n \t__webpack_require__.r = function(exports) {\n 
\t\tif(typeof Symbol !== 'undefined' && Symbol.toStringTag) {\n \t\t\tObject.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });\n \t\t}\n \t\tObject.defineProperty(exports, '__esModule', { value: true });\n \t};\n\n \t// create a fake namespace object\n \t// mode & 1: value is a module id, require it\n \t// mode & 2: merge all properties of value into the ns\n \t// mode & 4: return value when already ns object\n \t// mode & 8|1: behave like require\n \t__webpack_require__.t = function(value, mode) {\n \t\tif(mode & 1) value = __webpack_require__(value);\n \t\tif(mode & 8) return value;\n \t\tif((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;\n \t\tvar ns = Object.create(null);\n \t\t__webpack_require__.r(ns);\n \t\tObject.defineProperty(ns, 'default', { enumerable: true, value: value });\n \t\tif(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));\n \t\treturn ns;\n \t};\n\n \t// getDefaultExport function for compatibility with non-harmony modules\n \t__webpack_require__.n = function(module) {\n \t\tvar getter = module && module.__esModule ?\n \t\t\tfunction getDefault() { return module['default']; } :\n \t\t\tfunction getModuleExports() { return module; };\n \t\t__webpack_require__.d(getter, 'a', getter);\n \t\treturn getter;\n \t};\n\n \t// Object.prototype.hasOwnProperty.call\n \t__webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };\n\n \t// __webpack_public_path__\n \t__webpack_require__.p = \"\";\n\n\n \t// Load entry module and return exports\n \treturn __webpack_require__(__webpack_require__.s = 1);\n","const DataModel = require('./export');\n\nmodule.exports = DataModel.default ? 
DataModel.default : DataModel;\n","/**\n * DataFormat Enum defines the format of the input data.\n * Based on the format of the data the respective adapter is loaded.\n *\n * @readonly\n * @enum {string}\n */\nconst DataFormat = {\n FLAT_JSON: 'FlatJSON',\n DSV_STR: 'DSVStr',\n DSV_ARR: 'DSVArr',\n AUTO: 'Auto'\n};\n\nexport default DataFormat;\n","/**\n * DimensionSubtype enum defines the sub types of the Dimensional Field.\n *\n * @readonly\n * @enum {string}\n */\nconst DimensionSubtype = {\n CATEGORICAL: 'categorical',\n TEMPORAL: 'temporal',\n BINNED: 'binned'\n};\n\nexport default DimensionSubtype;\n","/**\n * MeasureSubtype enum defines the sub types of the Measure Field.\n *\n * @readonly\n * @enum {string}\n */\nconst MeasureSubtype = {\n CONTINUOUS: 'continuous'\n};\n\nexport default MeasureSubtype;\n","/**\n * FieldType enum defines the high level field based on which visuals are controlled.\n * Measure in a high level is numeric field and Dimension in a high level is string field.\n *\n * @readonly\n * @enum {string}\n */\nconst FieldType = {\n MEASURE: 'measure',\n DIMENSION: 'dimension'\n};\n\nexport default FieldType;\n","/**\n * Filtering mode enum defines the filering modes of DataModel.\n *\n * @readonly\n * @enum {string}\n */\nconst FilteringMode = {\n NORMAL: 'normal',\n INVERSE: 'inverse',\n ALL: 'all'\n};\n\nexport default FilteringMode;\n","/**\n * Group by function names\n *\n * @readonly\n * @enum {string}\n */\nconst GROUP_BY_FUNCTIONS = {\n SUM: 'sum',\n AVG: 'avg',\n MIN: 'min',\n MAX: 'max',\n FIRST: 'first',\n LAST: 'last',\n COUNT: 'count',\n STD: 'std'\n};\n\nexport default GROUP_BY_FUNCTIONS;\n","/**\n * Creates a JS native date object from input\n *\n * @param {string | number | Date} date Input using which date object to be created\n * @return {Date} : JS native date object\n */\nfunction convertToNativeDate (date) {\n if (date instanceof Date) {\n return date;\n }\n\n return new Date(date);\n}\n/**\n * Apply padding before a number if its less than 1o. This is used when constant digit's number to be returned\n * between 0 - 99\n *\n * @param {number} n Input to be padded\n * @return {string} Padded number\n */\nfunction pad (n) {\n return (n < 10) ? (`0${n}`) : n;\n}\n/*\n * DateFormatter utility to convert any date format to any other date format\n * DateFormatter parse a date time stamp specified by a user abiding by rules which are defined\n * by user in terms of token. It creates JS native date object from the user specified format.\n * That native date can also be displayed\n * in any specified format.\n * This utility class only takes care of format conversion only\n */\n\n/*\n * Escapes all the special character that are used in regular expression.\n * Like\n * RegExp.escape('sgfd-$') // Output: sgfd\\-\\$\n *\n * @param text {String} : text which is to be escaped\n */\nRegExp.escape = function (text) {\n return text.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n};\n\n/**\n * DateTimeFormatter class to convert any user format of date time stamp to any other format\n * of date time stamp.\n *\n * @param {string} format Format of the date given. 
For the above date,\n * 'year: %Y, month: %b, day: %d'.\n * @class\n */\n/* istanbul ignore next */ function DateTimeFormatter (format) {\n this.format = format;\n this.dtParams = undefined;\n this.nativeDate = undefined;\n}\n\n// The identifier of the tokens\nDateTimeFormatter.TOKEN_PREFIX = '%';\n\n// JS native Date constructor takes the date params (year, month, etc) in a certail sequence.\n// This defines the sequence of the date parameters in the constructor.\nDateTimeFormatter.DATETIME_PARAM_SEQUENCE = {\n YEAR: 0,\n MONTH: 1,\n DAY: 2,\n HOUR: 3,\n MINUTE: 4,\n SECOND: 5,\n MILLISECOND: 6\n};\n\n/*\n * This is a default number parsing utility. It tries to parse a number in integer, if parsing is unsuccessful, it\n * gives back a default value.\n *\n * @param: defVal {Number} : Default no if the parsing to integer is not successful\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be parsed.\n */\nDateTimeFormatter.defaultNumberParser = function (defVal) {\n return function (val) {\n let parsedVal;\n if (isFinite(parsedVal = parseInt(val, 10))) {\n return parsedVal;\n }\n\n return defVal;\n };\n};\n\n/*\n * This is a default number range utility. It tries to find an element in the range. If not found it returns a\n * default no as an index.\n *\n * @param: range {Array} : The list which is to be serached\n * @param: defVal {Number} : Default no if the serach and find does not return anything\n * @return {Function} : An closure function which is to be called by passing an the value which needs to be found\n */\nDateTimeFormatter.defaultRangeParser = function (range, defVal) {\n return (val) => {\n let i;\n let l;\n\n if (!val) { return defVal; }\n\n const nVal = val.toLowerCase();\n\n for (i = 0, l = range.length; i < l; i++) {\n if (range[i].toLowerCase() === nVal) {\n return i;\n }\n }\n\n if (i === undefined) {\n return defVal;\n }\n return null;\n };\n};\n\n/*\n * Defines the tokens which are supporter by the dateformatter. Using this definitation a value gets extracted from\n * the user specifed date string. This also formats the value for display purpose from native JS date.\n * The definition of each token contains the following named properties\n * {\n * %token_name% : {\n * name: name of the token, this is used in reverse lookup,\n * extract: a function that returns the regular expression to extract that piece of information. 
All the\n * regex should be gouped by using ()\n * parser: a function which receives value extracted by the above regex and parse it to get the date params\n * formatter: a formatter function that takes milliseconds or JS Date object and format the param\n * represented by the token only.\n * }\n * }\n *\n * @return {Object} : Definition of the all the supported tokens.\n */\nDateTimeFormatter.getTokenDefinitions = function () {\n const daysDef = {\n short: [\n 'Sun',\n 'Mon',\n 'Tue',\n 'Wed',\n 'Thu',\n 'Fri',\n 'Sat'\n ],\n long: [\n 'Sunday',\n 'Monday',\n 'Tuesday',\n 'Wednesday',\n 'Thursday',\n 'Friday',\n 'Saturday'\n ]\n };\n const monthsDef = {\n short: [\n 'Jan',\n 'Feb',\n 'Mar',\n 'Apr',\n 'May',\n 'Jun',\n 'Jul',\n 'Aug',\n 'Sep',\n 'Oct',\n 'Nov',\n 'Dec'\n ],\n long: [\n 'January',\n 'February',\n 'March',\n 'April',\n 'May',\n 'June',\n 'July',\n 'August',\n 'September',\n 'October',\n 'November',\n 'December'\n ]\n };\n\n const definitions = {\n H: {\n // 24 hours format\n name: 'H',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n\n return d.getHours().toString();\n }\n },\n l: {\n // 12 hours format\n name: 'l',\n index: 3,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const hours = d.getHours() % 12;\n\n return (hours === 0 ? 12 : hours).toString();\n }\n },\n p: {\n // AM or PM\n name: 'p',\n index: 3,\n extract () { return '(AM|PM)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 'AM' : 'PM');\n }\n },\n P: {\n // am or pm\n name: 'P',\n index: 3,\n extract () { return '(am|pm)'; },\n parser: (val) => {\n if (val) {\n return val.toLowerCase();\n }\n return null;\n },\n formatter: (val) => {\n const d = convertToNativeDate(val);\n const hours = d.getHours();\n\n return (hours < 12 ? 
'am' : 'pm');\n }\n },\n M: {\n // Two digit minutes 00 - 59\n name: 'M',\n index: 4,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const mins = d.getMinutes();\n\n return pad(mins);\n }\n },\n S: {\n // Two digit seconds 00 - 59\n name: 'S',\n index: 5,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const seconds = d.getSeconds();\n\n return pad(seconds);\n }\n },\n K: {\n // Milliseconds\n name: 'K',\n index: 6,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const ms = d.getMilliseconds();\n\n return ms.toString();\n }\n },\n a: {\n // Short name of day, like Mon\n name: 'a',\n index: 2,\n extract () { return `(${daysDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.short[day]).toString();\n }\n },\n A: {\n // Long name of day, like Monday\n name: 'A',\n index: 2,\n extract () { return `(${daysDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(daysDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDay();\n\n return (daysDef.long[day]).toString();\n }\n },\n e: {\n // 8 of March, 11 of November\n name: 'e',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return day.toString();\n }\n },\n d: {\n // 08 of March, 11 of November\n name: 'd',\n index: 2,\n extract () { return '(\\\\d+)'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const day = d.getDate();\n\n return pad(day);\n }\n },\n b: {\n // Short month, like Jan\n name: 'b',\n index: 1,\n extract () { return `(${monthsDef.short.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.short),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.short[month]).toString();\n }\n },\n B: {\n // Long month, like January\n name: 'B',\n index: 1,\n extract () { return `(${monthsDef.long.join('|')})`; },\n parser: DateTimeFormatter.defaultRangeParser(monthsDef.long),\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return (monthsDef.long[month]).toString();\n }\n },\n m: {\n // Two digit month of year like 01 for January\n name: 'm',\n index: 1,\n extract () { return '(\\\\d+)'; },\n parser (val) { return DateTimeFormatter.defaultNumberParser()(val) - 1; },\n formatter (val) {\n const d = convertToNativeDate(val);\n const month = d.getMonth();\n\n return pad(month + 1);\n }\n },\n y: {\n // Short year like 90 for 1990\n name: 'y',\n index: 0,\n extract () { return '(\\\\d{2})'; },\n parser (val) {\n let result;\n if (val) {\n const l = val.length;\n val = val.substring(l - 2, l);\n }\n let parsedVal = DateTimeFormatter.defaultNumberParser()(val);\n let presentDate = new Date();\n let presentYear = Math.trunc((presentDate.getFullYear()) / 100);\n\n result = `${presentYear}${parsedVal}`;\n\n if (convertToNativeDate(result).getFullYear() > presentDate.getFullYear()) {\n result = `${presentYear - 1}${parsedVal}`;\n }\n return 
convertToNativeDate(result).getFullYear();\n },\n formatter (val) {\n const d = convertToNativeDate(val);\n let year = d.getFullYear().toString();\n let l;\n\n if (year) {\n l = year.length;\n year = year.substring(l - 2, l);\n }\n\n return year;\n }\n },\n Y: {\n // Long year like 1990\n name: 'Y',\n index: 0,\n extract () { return '(\\\\d{4})'; },\n parser: DateTimeFormatter.defaultNumberParser(),\n formatter (val) {\n const d = convertToNativeDate(val);\n const year = d.getFullYear().toString();\n\n return year;\n }\n }\n };\n\n return definitions;\n};\n\n/*\n * The tokens which works internally is not user friendly in terms of memorizing the names. This gives a formal\n * definition to the informal notations.\n *\n * @return {Object} : Formal definition of the tokens\n */\nDateTimeFormatter.getTokenFormalNames = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n\n return {\n HOUR: definitions.H,\n HOUR_12: definitions.l,\n AMPM_UPPERCASE: definitions.p,\n AMPM_LOWERCASE: definitions.P,\n MINUTE: definitions.M,\n SECOND: definitions.S,\n SHORT_DAY: definitions.a,\n LONG_DAY: definitions.A,\n DAY_OF_MONTH: definitions.e,\n DAY_OF_MONTH_CONSTANT_WIDTH: definitions.d,\n SHORT_MONTH: definitions.b,\n LONG_MONTH: definitions.B,\n MONTH_OF_YEAR: definitions.m,\n SHORT_YEAR: definitions.y,\n LONG_YEAR: definitions.Y\n };\n};\n\n/*\n * This defines the rules and declares dependencies that resolves a date parameter (year, month etc) from\n * the date time parameter array.\n *\n * @return {Object} : An object that contains dependencies and a resolver function. The dependencies values are fed\n * to the resolver function in that particular sequence only.\n */\nDateTimeFormatter.tokenResolver = function () {\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const defaultResolver = (...args) => { // eslint-disable-line require-jsdoc\n let i = 0;\n let arg;\n let targetParam;\n const l = args.length;\n\n for (; i < l; i++) {\n arg = args[i];\n if (args[i]) {\n targetParam = arg;\n }\n }\n\n if (!targetParam) { return null; }\n\n return targetParam[0].parser(targetParam[1]);\n };\n\n return {\n YEAR: [definitions.y, definitions.Y,\n defaultResolver\n ],\n MONTH: [definitions.b, definitions.B, definitions.m,\n defaultResolver\n ],\n DAY: [definitions.a, definitions.A, definitions.e, definitions.d,\n defaultResolver\n ],\n HOUR: [definitions.H, definitions.l, definitions.p, definitions.P,\n function (hourFormat24, hourFormat12, ampmLower, ampmUpper) {\n let targetParam;\n let amOrpm;\n let isPM;\n let val;\n\n if (hourFormat12 && (amOrpm = (ampmLower || ampmUpper))) {\n if (amOrpm[0].parser(amOrpm[1]) === 'pm') {\n isPM = true;\n }\n\n targetParam = hourFormat12;\n } else if (hourFormat12) {\n targetParam = hourFormat12;\n } else {\n targetParam = hourFormat24;\n }\n\n if (!targetParam) { return null; }\n\n val = targetParam[0].parser(targetParam[1]);\n if (isPM) {\n val += 12;\n }\n return val;\n }\n ],\n MINUTE: [definitions.M,\n defaultResolver\n ],\n SECOND: [definitions.S,\n defaultResolver\n ]\n };\n};\n\n/*\n * Finds token from the format rule specified by a user.\n * @param format {String} : The format of the input date specified by the user\n * @return {Array} : An array of objects which contains the available token and their occurence index in the format\n */\nDateTimeFormatter.findTokens = function (format) {\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenLiterals = 
Object.keys(definitions);\n const occurrence = [];\n let i;\n let forwardChar;\n\n while ((i = format.indexOf(tokenPrefix, i + 1)) >= 0) {\n forwardChar = format[i + 1];\n if (tokenLiterals.indexOf(forwardChar) === -1) { continue; }\n\n occurrence.push({\n index: i,\n token: forwardChar\n });\n }\n\n return occurrence;\n};\n\n/*\n * Format any JS date to a specified date given by user.\n *\n * @param date {Number | Date} : The date object which is to be formatted\n * @param format {String} : The format using which the date will be formatted for display\n */\nDateTimeFormatter.formatAs = function (date, format) {\n const nDate = convertToNativeDate(date);\n const occurrence = DateTimeFormatter.findTokens(format);\n const definitions = DateTimeFormatter.getTokenDefinitions();\n let formattedStr = String(format);\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n let token;\n let formattedVal;\n let i;\n let l;\n\n for (i = 0, l = occurrence.length; i < l; i++) {\n token = occurrence[i].token;\n formattedVal = definitions[token].formatter(nDate);\n formattedStr = formattedStr.replace(new RegExp(tokenPrefix + token, 'g'), formattedVal);\n }\n\n return formattedStr;\n};\n\n/*\n * Parses the user specified date string to extract the date time params.\n *\n * @return {Array} : Value of date time params in an array [year, month, day, hour, minutes, seconds, milli]\n */\nDateTimeFormatter.prototype.parse = function (dateTimeStamp, options) {\n const tokenResolver = DateTimeFormatter.tokenResolver();\n const dtParams = this.extractTokenValue(dateTimeStamp);\n const dtParamSeq = DateTimeFormatter.DATETIME_PARAM_SEQUENCE;\n const noBreak = options && options.noBreak;\n const dtParamArr = [];\n const args = [];\n let resolverKey;\n let resolverParams;\n let resolverFn;\n let val;\n let i;\n let param;\n let resolvedVal;\n let l;\n let result = [];\n\n for (resolverKey in tokenResolver) {\n if (!{}.hasOwnProperty.call(tokenResolver, resolverKey)) { continue; }\n\n args.length = 0;\n resolverParams = tokenResolver[resolverKey];\n resolverFn = resolverParams.splice(resolverParams.length - 1, 1)[0];\n\n for (i = 0, l = resolverParams.length; i < l; i++) {\n param = resolverParams[i];\n val = dtParams[param.name];\n\n if (val === undefined) {\n args.push(null);\n } else {\n args.push([param, val]);\n }\n }\n\n resolvedVal = resolverFn.apply(this, args);\n\n if ((resolvedVal === undefined || resolvedVal === null) && !noBreak) {\n break;\n }\n\n dtParamArr[dtParamSeq[resolverKey]] = resolvedVal;\n }\n\n if (dtParamArr.length && this.checkIfOnlyYear(dtParamArr.length))\n {\n result.unshift(dtParamArr[0], 0, 1); }\n else {\n result.unshift(...dtParamArr);\n }\n\n return result;\n};\n\n/*\n * Extract the value of the token from user specified date time string.\n *\n * @return {Object} : An key value pair which contains the tokens as key and value as pair\n */\nDateTimeFormatter.prototype.extractTokenValue = function (dateTimeStamp) {\n const format = this.format;\n const definitions = DateTimeFormatter.getTokenDefinitions();\n const tokenPrefix = DateTimeFormatter.TOKEN_PREFIX;\n const occurrence = DateTimeFormatter.findTokens(format);\n const tokenObj = {};\n\n let lastOccurrenceIndex;\n let occObj;\n let occIndex;\n let targetText;\n let regexFormat;\n\n let l;\n let i;\n\n regexFormat = String(format);\n\n const tokenArr = occurrence.map(obj => obj.token);\n const occurrenceLength = occurrence.length;\n for (i = occurrenceLength - 1; i >= 0; i--) {\n occIndex = occurrence[i].index;\n\n if (occIndex + 1 
=== regexFormat.length - 1) {\n lastOccurrenceIndex = occIndex;\n continue;\n }\n\n if (lastOccurrenceIndex === undefined) {\n lastOccurrenceIndex = regexFormat.length;\n }\n\n targetText = regexFormat.substring(occIndex + 2, lastOccurrenceIndex);\n regexFormat = regexFormat.substring(0, occIndex + 2) +\n RegExp.escape(targetText) +\n regexFormat.substring(lastOccurrenceIndex, regexFormat.length);\n\n lastOccurrenceIndex = occIndex;\n }\n\n for (i = 0; i < occurrenceLength; i++) {\n occObj = occurrence[i];\n regexFormat = regexFormat.replace(tokenPrefix + occObj.token, definitions[occObj.token].extract());\n }\n\n const extractValues = dateTimeStamp.match(new RegExp(regexFormat)) || [];\n extractValues.shift();\n\n for (i = 0, l = tokenArr.length; i < l; i++) {\n tokenObj[tokenArr[i]] = extractValues[i];\n }\n return tokenObj;\n};\n\n/*\n * Give back the JS native date formed from user specified date string\n *\n * @return {Date} : Native JS Date\n */\nDateTimeFormatter.prototype.getNativeDate = function (dateTimeStamp) {\n let date = null;\n if (Number.isFinite(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n } else if (!this.format && Date.parse(dateTimeStamp)) {\n date = new Date(dateTimeStamp);\n }\n else {\n const dtParams = this.dtParams = this.parse(dateTimeStamp);\n if (dtParams.length) {\n this.nativeDate = new Date(...dtParams);\n date = this.nativeDate;\n }\n }\n return date;\n};\n\nDateTimeFormatter.prototype.checkIfOnlyYear = function(len) {\n return len === 1 && this.format.match(/y|Y/g).length;\n};\n\n/*\n * Represents JS native date to a user specified format.\n *\n * @param format {String} : The format according to which the date is to be represented\n * @return {String} : The formatted date string\n */\nDateTimeFormatter.prototype.formatAs = function (format, dateTimeStamp) {\n let nativeDate;\n\n if (dateTimeStamp) {\n nativeDate = this.nativeDate = this.getNativeDate(dateTimeStamp);\n } else if (!(nativeDate = this.nativeDate)) {\n nativeDate = this.getNativeDate(dateTimeStamp);\n }\n\n return DateTimeFormatter.formatAs(nativeDate, format);\n};\n\nexport { DateTimeFormatter as default };\n","/**\n * The utility function to calculate major column.\n *\n * @param {Object} store - The store object.\n * @return {Function} Returns the push function.\n */\nexport default (store) => {\n let i = 0;\n return (...fields) => {\n fields.forEach((val, fieldIndex) => {\n if (!(store[fieldIndex] instanceof Array)) {\n store[fieldIndex] = Array.from({ length: i });\n }\n store[fieldIndex].push(val);\n });\n i++;\n };\n};\n","/* eslint-disable */\nconst OBJECTSTRING = 'object';\nconst objectToStrFn = Object.prototype.toString;\nconst objectToStr = '[object Object]';\nconst arrayToStr = '[object Array]';\n\nfunction checkCyclicRef(obj, parentArr) {\n let i = parentArr.length;\n let bIndex = -1;\n\n while (i) {\n if (obj === parentArr[i]) {\n bIndex = i;\n return bIndex;\n }\n i -= 1;\n }\n\n return bIndex;\n}\n\nfunction merge(obj1, obj2, skipUndef, tgtArr, srcArr) {\n var item,\n srcVal,\n tgtVal,\n str,\n cRef;\n // check whether obj2 is an array\n // if array then iterate through it's index\n // **** MOOTOOLS precution\n\n if (!srcArr) {\n tgtArr = [obj1];\n srcArr = [obj2];\n }\n else {\n tgtArr.push(obj1);\n srcArr.push(obj2);\n }\n\n if (obj2 instanceof Array) {\n for (item = 0; item < obj2.length; item += 1) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (typeof tgtVal !== OBJECTSTRING) {\n if (!(skipUndef && tgtVal === 
undefined)) {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = tgtVal instanceof Array ? [] : {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n }\n }\n else {\n for (item in obj2) {\n try {\n srcVal = obj1[item];\n tgtVal = obj2[item];\n }\n catch (e) {\n continue;\n }\n\n if (tgtVal !== null && typeof tgtVal === OBJECTSTRING) {\n // Fix for issue BUG: FWXT-602\n // IE < 9 Object.prototype.toString.call(null) gives\n // '[object Object]' instead of '[object Null]'\n // that's why null value becomes Object in IE < 9\n str = objectToStrFn.call(tgtVal);\n if (str === objectToStr) {\n if (srcVal === null || typeof srcVal !== OBJECTSTRING) {\n srcVal = obj1[item] = {};\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else if (str === arrayToStr) {\n if (srcVal === null || !(srcVal instanceof Array)) {\n srcVal = obj1[item] = [];\n }\n cRef = checkCyclicRef(tgtVal, srcArr);\n if (cRef !== -1) {\n srcVal = obj1[item] = tgtArr[cRef];\n }\n else {\n merge(srcVal, tgtVal, skipUndef, tgtArr, srcArr);\n }\n }\n else {\n obj1[item] = tgtVal;\n }\n }\n else {\n if (skipUndef && tgtVal === undefined) {\n continue;\n }\n obj1[item] = tgtVal;\n }\n }\n }\n return obj1;\n}\n\n\nfunction extend2 (obj1, obj2, skipUndef) {\n //if none of the arguments are object then return back\n if (typeof obj1 !== OBJECTSTRING && typeof obj2 !== OBJECTSTRING) {\n return null;\n }\n\n if (typeof obj2 !== OBJECTSTRING || obj2 === null) {\n return obj1;\n }\n\n if (typeof obj1 !== OBJECTSTRING) {\n obj1 = obj2 instanceof Array ? 
[] : {};\n }\n merge(obj1, obj2, skipUndef);\n return obj1;\n}\n\nexport { extend2 as default };\n","import { DataFormat } from '../enums';\n\n/**\n * Checks whether the value is an array.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an array otherwise returns false.\n */\nexport function isArray (val) {\n return Array.isArray(val);\n}\n\n/**\n * Checks whether the value is an object.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is an object otherwise returns false.\n */\nexport function isObject (val) {\n return val === Object(val);\n}\n\n/**\n * Checks whether the value is a string value.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is a string value otherwise returns false.\n */\nexport function isString (val) {\n return typeof val === 'string';\n}\n\n/**\n * Checks whether the value is callable.\n *\n * @param {*} val - The value to be checked.\n * @return {boolean} Returns true if the value is callable otherwise returns false.\n */\nexport function isCallable (val) {\n return typeof val === 'function';\n}\n\n/**\n * Returns the unique values from the input array.\n *\n * @param {Array} data - The input array.\n * @return {Array} Returns a new array of unique values.\n */\nexport function uniqueValues (data) {\n return [...new Set(data)];\n}\n\nexport const getUniqueId = () => `id-${new Date().getTime()}${Math.round(Math.random() * 10000)}`;\n\n/**\n * Checks Whether two arrays have same content.\n *\n * @param {Array} arr1 - The first array.\n * @param {Array} arr2 - The 2nd array.\n * @return {boolean} Returns whether two array have same content.\n */\nexport function isArrEqual(arr1, arr2) {\n if (!isArray(arr1) || !isArray(arr2)) {\n return arr1 === arr2;\n }\n\n if (arr1.length !== arr2.length) {\n return false;\n }\n\n for (let i = 0; i < arr1.length; i++) {\n if (arr1[i] !== arr2[i]) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * It is the default number format function for the measure field type.\n *\n * @param {any} val - The input value.\n * @return {number} Returns a number value.\n */\nexport function formatNumber(val) {\n return val;\n}\n\n/**\n * Returns the detected data format.\n *\n * @param {any} data - The input data to be tested.\n * @return {string} Returns the data format name.\n */\nexport const detectDataFormat = (data) => {\n if (isString(data)) {\n return DataFormat.DSV_STR;\n } else if (isArray(data) && isArray(data[0])) {\n return DataFormat.DSV_ARR;\n } else if (isArray(data) && (data.length === 0 || isObject(data[0]))) {\n return DataFormat.FLAT_JSON;\n }\n return null;\n};\n","import { FieldType } from './enums';\nimport { getUniqueId } from './utils';\n\nconst fieldStore = {\n data: {},\n\n createNamespace (fieldArr, name) {\n const dataId = name || getUniqueId();\n\n this.data[dataId] = {\n name: dataId,\n fields: fieldArr,\n\n fieldsObj () {\n let fieldsObj = this._cachedFieldsObj;\n\n if (!fieldsObj) {\n fieldsObj = this._cachedFieldsObj = {};\n this.fields.forEach((field) => {\n fieldsObj[field.name()] = field;\n });\n }\n return fieldsObj;\n },\n getMeasure () {\n let measureFields = this._cachedMeasure;\n\n if (!measureFields) {\n measureFields = this._cachedMeasure = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.MEASURE) {\n measureFields[field.name()] = field;\n }\n });\n }\n return measureFields;\n },\n getDimension () {\n let dimensionFields = 
this._cachedDimension;\n\n if (!this._cachedDimension) {\n dimensionFields = this._cachedDimension = {};\n this.fields.forEach((field) => {\n if (field.schema().type === FieldType.DIMENSION) {\n dimensionFields[field.name()] = field;\n }\n });\n }\n return dimensionFields;\n },\n };\n return this.data[dataId];\n },\n};\n\nexport default fieldStore;\n","import { getNumberFormattedVal } from './helper';\n\n/**\n * The wrapper class on top of the primitive value of a field.\n *\n * @todo Need to have support for StringValue, NumberValue, DateTimeValue\n * and GeoValue. These types should expose predicate API mostly.\n */\nclass Value {\n\n /**\n * Creates new Value instance.\n *\n * @param {*} val - the primitive value from the field cell.\n * @param {string | Field} field - The field from which the value belongs.\n */\n constructor (value, rawValue, field) {\n const formattedValue = getNumberFormattedVal(field, value);\n\n Object.defineProperties(this, {\n _value: {\n enumerable: false,\n configurable: false,\n writable: false,\n value\n },\n _formattedValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: formattedValue\n },\n _internalValue: {\n enumerable: false,\n configurable: false,\n writable: false,\n value: rawValue\n }\n });\n\n this.field = field;\n }\n\n /**\n * Returns the field value.\n *\n * @return {*} Returns the current value.\n */\n get value () {\n return this._value;\n }\n\n /**\n * Returns the parsed value of field\n */\n get formattedValue () {\n return this._formattedValue;\n }\n\n /**\n * Returns the internal value of field\n */\n get internalValue () {\n return this._internalValue;\n }\n\n /**\n * Converts to human readable string.\n *\n * @override\n * @return {string} Returns a human readable string of the field value.\n *\n */\n toString () {\n return String(this.value);\n }\n\n /**\n * Returns the value of the field.\n *\n * @override\n * @return {*} Returns the field value.\n */\n valueOf () {\n return this.value;\n }\n}\n\nexport default Value;\n","/**\n * Iterates through the diffSet array and call the callback with the current\n * index.\n *\n * @param {string} rowDiffset - The row diffset string e.g. 
'0-4,6,10-13'.\n * @param {Function} callback - The callback function to be called with every index.\n */\nexport function rowDiffsetIterator (rowDiffset, callback) {\n if (rowDiffset.length > 0) {\n const rowDiffArr = rowDiffset.split(',');\n rowDiffArr.forEach((diffStr) => {\n const diffStsArr = diffStr.split('-');\n const start = +(diffStsArr[0]);\n const end = +(diffStsArr[1] || diffStsArr[0]);\n if (end >= start) {\n for (let i = start; i <= end; i += 1) {\n callback(i);\n }\n }\n });\n }\n}\n","/**\n * A parser to parser null, undefined, invalid and NIL values.\n *\n * @public\n * @class\n */\nclass InvalidAwareTypes {\n /**\n * Static method which gets/sets the invalid value registry.\n *\n * @public\n * @param {Object} config - The custom configuration supplied by user.\n * @return {Object} Returns the invalid values registry.\n */\n static invalidAwareVals (config) {\n if (!config) {\n return InvalidAwareTypes._invalidAwareValsMap;\n }\n return Object.assign(InvalidAwareTypes._invalidAwareValsMap, config);\n }\n\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} value - The value of the invalid data type.\n */\n constructor (value) {\n this._value = value;\n }\n\n /**\n * Returns the current value of the instance.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n value () {\n return this._value;\n }\n\n /**\n * Returns the current value of the instance in string format.\n *\n * @public\n * @return {string} Returns the value of the invalid data type.\n */\n toString () {\n return String(this._value);\n }\n\n static isInvalid(val) {\n return (val instanceof InvalidAwareTypes) || !!InvalidAwareTypes.invalidAwareVals()[val];\n }\n\n static getInvalidType(val) {\n return val instanceof InvalidAwareTypes ? 
val : InvalidAwareTypes.invalidAwareVals()[val];\n }\n}\n\n/**\n * Enums for Invalid types.\n */\nInvalidAwareTypes.NULL = new InvalidAwareTypes('null');\nInvalidAwareTypes.NA = new InvalidAwareTypes('na');\nInvalidAwareTypes.NIL = new InvalidAwareTypes('nil');\n\n/**\n * Default Registry for mapping the invalid values.\n *\n * @private\n */\nInvalidAwareTypes._invalidAwareValsMap = {\n invalid: InvalidAwareTypes.NA,\n nil: InvalidAwareTypes.NIL,\n null: InvalidAwareTypes.NULL,\n undefined: InvalidAwareTypes.NA\n};\n\nexport default InvalidAwareTypes;\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nconst generateBuckets = (binSize, start, end) => {\n const buckets = [];\n let next = start;\n\n while (next < end) {\n buckets.push(next);\n next += binSize;\n }\n buckets.push(next);\n\n return buckets;\n};\n\nconst findBucketRange = (bucketRanges, value) => {\n let leftIdx = 0;\n let rightIdx = bucketRanges.length - 1;\n let midIdx;\n let range;\n\n // Here use binary search as the bucketRanges is a sorted array\n while (leftIdx <= rightIdx) {\n midIdx = leftIdx + Math.floor((rightIdx - leftIdx) / 2);\n range = bucketRanges[midIdx];\n\n if (value >= range.start && value < range.end) {\n return range;\n } else if (value >= range.end) {\n leftIdx = midIdx + 1;\n } else if (value < range.start) {\n rightIdx = midIdx - 1;\n }\n }\n\n return null;\n};\n\n /**\n * Creates the bin data from input measure field and supplied configs.\n *\n * @param {Measure} measureField - The Measure field instance.\n * @param {string} rowDiffset - The datamodel rowDiffset values.\n * @param {Object} config - The config object.\n * @return {Object} Returns the binned data and the corresponding bins.\n */\nexport function createBinnedFieldData (measureField, rowDiffset, config) {\n let { buckets, binsCount, binSize, start, end } = config;\n const [dMin, dMax] = measureField.domain();\n\n if (!buckets) {\n start = (start !== 0 && (!start || start > dMin)) ? dMin : start;\n end = (end !== 0 && (!end || end < dMax)) ? 
(dMax + 1) : end;\n\n if (binsCount) {\n binSize = Math.ceil(Math.abs(end - start) / binsCount);\n }\n\n buckets = generateBuckets(binSize, start, end);\n }\n\n if (buckets[0] > dMin) {\n buckets.unshift(dMin);\n }\n if (buckets[buckets.length - 1] <= dMax) {\n buckets.push(dMax + 1);\n }\n\n const bucketRanges = [];\n for (let i = 0; i < buckets.length - 1; i++) {\n bucketRanges.push({\n start: buckets[i],\n end: buckets[i + 1]\n });\n }\n\n const binnedData = [];\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = measureField.partialField.data[i];\n if (datum instanceof InvalidAwareTypes) {\n binnedData.push(datum);\n return;\n }\n\n const range = findBucketRange(bucketRanges, datum);\n binnedData.push(`${range.start}-${range.end}`);\n });\n\n return { binnedData, bins: buckets };\n}\n","export { DataFormat, FilteringMode } from '../enums';\n/**\n * The event name for data propagation.\n */\nexport const PROPAGATION = 'propagation';\n\n/**\n * The name of the unique row id column in DataModel.\n */\nexport const ROW_ID = '__id__';\n\n/**\n * The enums for operation names performed on DataModel.\n */\nexport const DM_DERIVATIVES = {\n SELECT: 'select',\n PROJECT: 'project',\n GROUPBY: 'group',\n COMPOSE: 'compose',\n CAL_VAR: 'calculatedVariable',\n BIN: 'bin',\n SORT: 'sort'\n};\n\nexport const JOINS = {\n CROSS: 'cross',\n LEFTOUTER: 'leftOuter',\n RIGHTOUTER: 'rightOuter',\n NATURAL: 'natural',\n FULLOUTER: 'fullOuter'\n};\n\nexport const LOGICAL_OPERATORS = {\n AND: 'and',\n OR: 'or'\n};\n","/**\n * The helper function that returns an array of common schema\n * from two fieldStore instances.\n *\n * @param {FieldStore} fs1 - The first FieldStore instance.\n * @param {FieldStore} fs2 - The second FieldStore instance.\n * @return {Array} An array containing the common schema.\n */\nexport function getCommonSchema (fs1, fs2) {\n const retArr = [];\n const fs1Arr = [];\n fs1.fields.forEach((field) => {\n fs1Arr.push(field.schema().name);\n });\n fs2.fields.forEach((field) => {\n if (fs1Arr.indexOf(field.schema().name) !== -1) {\n retArr.push(field.schema().name);\n }\n });\n return retArr;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { getCommonSchema } from './get-common-schema';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { JOINS } from '../constants';\nimport { prepareJoinData } from '../helper';\n/**\n * Default filter function for crossProduct.\n *\n * @return {boolean} Always returns true.\n */\nfunction defaultFilterFn() { return true; }\n\n/**\n * Implementation of cross product operation between two DataModel instances.\n * It internally creates the data and schema for the new DataModel.\n *\n * @param {DataModel} dataModel1 - The left DataModel instance.\n * @param {DataModel} dataModel2 - The right DataModel instance.\n * @param {Function} filterFn - The filter function which is used to filter the tuples.\n * @param {boolean} [replaceCommonSchema=false] - The flag if the common name schema should be there.\n * @return {DataModel} Returns The newly created DataModel instance from the crossProduct operation.\n */\nexport function crossProduct (dm1, dm2, filterFn, replaceCommonSchema = false, jointype = JOINS.CROSS) {\n const schema = [];\n const data = [];\n const applicableFilterFn = filterFn || defaultFilterFn;\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreName = dm1FieldStore.name;\n const dm2FieldStoreName = dm2FieldStore.name;\n const 
name = `${dm1FieldStore.name}.${dm2FieldStore.name}`;\n const commonSchemaList = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n if (dm1FieldStoreName === dm2FieldStoreName) {\n throw new Error('DataModels must have different alias names');\n }\n // Here prepare the schema\n dm1FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1 && !replaceCommonSchema) {\n tmpSchema.name = `${dm1FieldStore.name}.${tmpSchema.name}`;\n }\n schema.push(tmpSchema);\n });\n dm2FieldStore.fields.forEach((field) => {\n const tmpSchema = extend2({}, field.schema());\n if (commonSchemaList.indexOf(tmpSchema.name) !== -1) {\n if (!replaceCommonSchema) {\n tmpSchema.name = `${dm2FieldStore.name}.${tmpSchema.name}`;\n schema.push(tmpSchema);\n }\n } else {\n schema.push(tmpSchema);\n }\n });\n\n // Here prepare Data\n rowDiffsetIterator(dm1._rowDiffset, (i) => {\n let rowAdded = false;\n let rowPosition;\n rowDiffsetIterator(dm2._rowDiffset, (ii) => {\n const tuple = [];\n const userArg = {};\n userArg[dm1FieldStoreName] = {};\n userArg[dm2FieldStoreName] = {};\n dm1FieldStore.fields.forEach((field) => {\n tuple.push(field.partialField.data[i]);\n userArg[dm1FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[i],\n formattedValue: field.formattedData()[i],\n };\n });\n dm2FieldStore.fields.forEach((field) => {\n if (!(commonSchemaList.indexOf(field.schema().name) !== -1 && replaceCommonSchema)) {\n tuple.push(field.partialField.data[ii]);\n }\n userArg[dm2FieldStoreName][field.name()] = {\n rawValue: field.partialField.data[ii],\n formattedValue: field.formattedData()[ii],\n };\n });\n\n let cachedStore = {};\n let cloneProvider1 = () => dm1.detachedRoot();\n let cloneProvider2 = () => dm2.detachedRoot();\n\n const dm1Fields = prepareJoinData(userArg[dm1FieldStoreName]);\n const dm2Fields = prepareJoinData(userArg[dm2FieldStoreName]);\n if (applicableFilterFn(dm1Fields, dm2Fields, cloneProvider1, cloneProvider2, cachedStore)) {\n const tupleObj = {};\n tuple.forEach((cellVal, iii) => {\n tupleObj[schema[iii].name] = cellVal;\n });\n if (rowAdded && JOINS.CROSS !== jointype) {\n data[rowPosition] = tupleObj;\n }\n else {\n data.push(tupleObj);\n rowAdded = true;\n rowPosition = i;\n }\n } else if ((jointype === JOINS.LEFTOUTER || jointype === JOINS.RIGHTOUTER) && !rowAdded) {\n const tupleObj = {};\n let len = dm1FieldStore.fields.length - 1;\n tuple.forEach((cellVal, iii) => {\n if (iii <= len) {\n tupleObj[schema[iii].name] = cellVal;\n }\n else {\n tupleObj[schema[iii].name] = null;\n }\n });\n rowAdded = true;\n rowPosition = i;\n data.push(tupleObj);\n }\n });\n });\n\n return new DataModel(data, schema, { name });\n}\n","/**\n * The default sort function.\n *\n * @param {*} a - The first value.\n * @param {*} b - The second value.\n * @return {number} Returns the comparison result e.g. 
1 or 0 or -1.\n */\nfunction defSortFn (a, b) {\n const a1 = `${a}`;\n const b1 = `${b}`;\n if (a1 < b1) {\n return -1;\n }\n if (a1 > b1) {\n return 1;\n }\n return 0;\n}\n\n/**\n * The helper function for merge sort which creates the sorted array\n * from the two halves of the input array.\n *\n * @param {Array} arr - The target array which needs to be merged.\n * @param {number} lo - The starting index of the first array half.\n * @param {number} mid - The ending index of the first array half.\n * @param {number} hi - The ending index of the second array half.\n * @param {Function} sortFn - The sort function.\n */\nfunction merge (arr, lo, mid, hi, sortFn) {\n const mainArr = arr;\n const auxArr = [];\n for (let i = lo; i <= hi; i += 1) {\n auxArr[i] = mainArr[i];\n }\n let a = lo;\n let b = mid + 1;\n\n for (let i = lo; i <= hi; i += 1) {\n if (a > mid) {\n mainArr[i] = auxArr[b];\n b += 1;\n } else if (b > hi) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else if (sortFn(auxArr[a], auxArr[b]) <= 0) {\n mainArr[i] = auxArr[a];\n a += 1;\n } else {\n mainArr[i] = auxArr[b];\n b += 1;\n }\n }\n}\n\n/**\n * The helper function for merge sort which would be called\n * recursively for sorting the array halves.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {number} lo - The starting index of the array half.\n * @param {number} hi - The ending index of the array half.\n * @param {Function} sortFn - The sort function.\n * @return {Array} Returns the target array itself.\n */\nfunction sort (arr, lo, hi, sortFn) {\n if (hi === lo) { return arr; }\n\n const mid = lo + Math.floor((hi - lo) / 2);\n sort(arr, lo, mid, sortFn);\n sort(arr, mid + 1, hi, sortFn);\n merge(arr, lo, mid, hi, sortFn);\n\n return arr;\n}\n\n/**\n * The implementation of merge sort.\n * It is used in DataModel for stable sorting since it is not guaranteed\n * that the sorting algorithm used by browsers is stable.\n *\n * @param {Array} arr - The target array which needs to be sorted.\n * @param {Function} [sortFn=defSortFn] - The sort function.\n * @return {Array} Returns the input array itself in sorted order.\n */\nexport function mergeSort (arr, sortFn = defSortFn) {\n if (arr.length > 1) {\n sort(arr, 0, arr.length - 1, sortFn);\n }\n return arr;\n}\n","import { DimensionSubtype, MeasureSubtype } from '../enums';\nimport { mergeSort } from './merge-sort';\nimport { fieldInSchema } from '../helper';\nimport { isCallable, isArray } from '../utils';\n\n/**\n * Generates the sorting function to sort the data of a DataModel instance\n * according to the input data type.\n *\n * @param {string} dataType - The data type e.g. 'measure', 'datetime' etc.\n * @param {string} sortType - The sorting order i.e. 'asc' or 'desc'.\n * @return {Function} Returns the sorting function.\n */\nfunction getSortFn (dataType, sortType) {\n let retFunc;\n\n switch (dataType) {\n case MeasureSubtype.CONTINUOUS:\n case DimensionSubtype.TEMPORAL:\n if (sortType === 'asc') {\n retFunc = (a, b) => a - b;\n } else {\n retFunc = (a, b) => b - a;\n }\n break;\n default:\n if (sortType === 'asc') {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 1 : -1;\n };\n } else {\n retFunc = (a, b) => {\n a = `${a}`;\n b = `${b}`;\n if (a === b) {\n return 0;\n }\n return a > b ? 
-1 : 1;\n };\n }\n }\n\n return retFunc;\n}\n\n/**\n * Resolves the actual sorting function based on sorting string value.\n *\n * @param {Object} fDetails - The target field info.\n * @param {string} strSortOrder - The sort order value.\n * @return {Function} Returns the sorting function.\n */\nfunction resolveStrSortOrder (fDetails, strSortOrder) {\n const sortOrder = String(strSortOrder).toLowerCase() === 'desc' ? 'desc' : 'asc';\n return getSortFn(fDetails.type, sortOrder);\n}\n\n/**\n * Groups the data according to the specified target field.\n *\n * @param {Array} data - The input data array.\n * @param {number} fieldIndex - The target field index within schema array.\n * @return {Array} Returns an array containing the grouped data.\n */\nfunction groupData (data, fieldIndex) {\n const hashMap = new Map();\n const groupedData = [];\n\n data.forEach((datum) => {\n const fieldVal = datum[fieldIndex];\n if (hashMap.has(fieldVal)) {\n groupedData[hashMap.get(fieldVal)][1].push(datum);\n } else {\n groupedData.push([fieldVal, [datum]]);\n hashMap.set(fieldVal, groupedData.length - 1);\n }\n });\n\n return groupedData;\n}\n\n/**\n * Creates the argument value used for sorting function when sort is done\n * with another fields.\n *\n * @param {Array} groupedDatum - The grouped datum for a single dimension field value.\n * @param {Array} targetFields - An array of the sorting fields.\n * @param {Array} targetFieldDetails - An array of the sorting field details in schema.\n * @return {Object} Returns an object containing the value of sorting fields and the target field name.\n */\nfunction createSortingFnArg (groupedDatum, targetFields, targetFieldDetails) {\n const arg = {\n label: groupedDatum[0]\n };\n\n targetFields.reduce((acc, next, idx) => {\n acc[next] = groupedDatum[1].map(datum => datum[targetFieldDetails[idx].index]);\n return acc;\n }, arg);\n\n return arg;\n}\n\n/**\n * Sorts the data by applying the standard sorting mechanism.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nfunction applyStandardSort (data, schema, sortingDetails) {\n let fieldName;\n let sortMeta;\n let fDetails;\n let i = sortingDetails.length - 1;\n\n for (; i >= 0; i--) {\n fieldName = sortingDetails[i][0];\n sortMeta = sortingDetails[i][1];\n fDetails = fieldInSchema(schema, fieldName);\n\n if (!fDetails) {\n // eslint-disable-next-line no-continue\n continue;\n }\n\n if (isCallable(sortMeta)) {\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortMeta(a[fDetails.index], b[fDetails.index]));\n } else if (isArray(sortMeta)) {\n const groupedData = groupData(data, fDetails.index);\n const sortingFn = sortMeta[sortMeta.length - 1];\n const targetFields = sortMeta.slice(0, sortMeta.length - 1);\n const targetFieldDetails = targetFields.map(f => fieldInSchema(schema, f));\n\n groupedData.forEach((groupedDatum) => {\n groupedDatum.push(createSortingFnArg(groupedDatum, targetFields, targetFieldDetails));\n });\n\n mergeSort(groupedData, (a, b) => {\n const m = a[2];\n const n = b[2];\n return sortingFn(m, n);\n });\n\n // Empty the array\n data.length = 0;\n groupedData.forEach((datum) => {\n data.push(...datum[1]);\n });\n } else {\n const sortFn = resolveStrSortOrder(fDetails, sortMeta);\n // eslint-disable-next-line no-loop-func\n mergeSort(data, (a, b) => sortFn(a[fDetails.index], b[fDetails.index]));\n }\n }\n}\n\n/**\n * Creates a map based on grouping.\n 
*\n * @param {Array} depColumns - The dependency columns' info.\n * @param {Array} data - The input data.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - The sorting details for standard sorting.\n * @return {Map} Returns a map.\n */\nconst makeGroupMapAndSort = (depColumns, data, schema, sortingDetails) => {\n if (depColumns.length === 0) { return data; }\n\n const targetCol = depColumns[0];\n const map = new Map();\n\n data.reduce((acc, currRow) => {\n const fVal = currRow[targetCol.index];\n if (acc.has(fVal)) {\n acc.get(fVal).push(currRow);\n } else {\n acc.set(fVal, [currRow]);\n }\n return acc;\n }, map);\n\n for (let [key, val] of map) {\n const nMap = makeGroupMapAndSort(depColumns.slice(1), val, schema, sortingDetails);\n map.set(key, nMap);\n if (Array.isArray(nMap)) {\n applyStandardSort(nMap, schema, sortingDetails);\n }\n }\n\n return map;\n};\n\n/**\n * Sorts the data by retaining the position/order of a particular field.\n *\n * @param {Array} data - The input data array.\n * @param {Array} schema - The data schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n * @param {Array} depColumns - The dependency column list.\n * @return {Array} Returns the sorted data.\n */\nfunction applyGroupSort (data, schema, sortingDetails, depColumns) {\n sortingDetails = sortingDetails.filter((detail) => {\n if (detail[1] === null) {\n depColumns.push(detail[0]);\n return false;\n }\n return true;\n });\n if (sortingDetails.length === 0) { return data; }\n\n depColumns = depColumns.map(c => fieldInSchema(schema, c));\n\n const sortedGroupMap = makeGroupMapAndSort(depColumns, data, schema, sortingDetails);\n return data.map((row) => {\n let i = 0;\n let nextMap = sortedGroupMap;\n\n while (!Array.isArray(nextMap)) {\n nextMap = nextMap.get(row[depColumns[i++].index]);\n }\n\n return nextMap.shift();\n });\n}\n\n/**\n * Sorts the data.\n *\n * @param {Object} dataObj - An object containing the data and schema.\n * @param {Array} sortingDetails - An array containing the sorting configs.\n */\nexport function sortData (dataObj, sortingDetails) {\n let { schema, data } = dataObj;\n\n sortingDetails = sortingDetails.filter(sDetial => !!fieldInSchema(schema, sDetial[0]));\n if (sortingDetails.length === 0) { return; }\n\n let groupSortingIdx = sortingDetails.findIndex(sDetial => sDetial[1] === null);\n groupSortingIdx = groupSortingIdx !== -1 ? groupSortingIdx : sortingDetails.length;\n\n const standardSortingDetails = sortingDetails.slice(0, groupSortingIdx);\n const groupSortingDetails = sortingDetails.slice(groupSortingIdx);\n\n applyStandardSort(data, schema, standardSortingDetails);\n data = applyGroupSort(data, schema, groupSortingDetails, standardSortingDetails.map(detail => detail[0]));\n\n dataObj.uids = data.map(row => row.pop());\n dataObj.data = data;\n}\n","import { rowDiffsetIterator } from './row-diffset-iterator';\nimport { sortData } from './sort';\nimport { FieldType } from '../enums';\nimport { ROW_ID } from '../constants';\n\n/**\n * Builds the actual data array.\n *\n * @param {Array} fieldStore - An array of field.\n * @param {string} rowDiffset - A string consisting of which rows to be included eg. 
'0-2,4,6';\n * @param {string} colIdentifier - A string consisting of the details of which columns\n * are to be included, e.g. 'date,sales,profit';\n * @param {Object} sortingDetails - An object containing the sorting details of the DataModel instance.\n * @param {Object} options - The options required to create the type of the data.\n * @return {Object} Returns an object containing the multidimensional array and the relative schema.\n */\nexport function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetails, options) {\n const defOptions = {\n addUid: false,\n columnWise: false\n };\n options = Object.assign({}, defOptions, options);\n\n const retObj = {\n schema: [],\n data: [],\n uids: []\n };\n const addUid = options.addUid;\n const reqSorting = sortingDetails && sortingDetails.length > 0;\n // It stores the fields according to the colIdentifier argument\n const tmpDataArr = [];\n // Stores the column names from the colIdentifier argument\n const colIArr = colIdentifier.split(',');\n\n colIArr.forEach((colName) => {\n for (let i = 0; i < fieldStore.length; i += 1) {\n if (fieldStore[i].name() === colName) {\n tmpDataArr.push(fieldStore[i]);\n break;\n }\n }\n });\n\n // Inserts the schema into the return object\n tmpDataArr.forEach((field) => {\n /** @todo Need to use extend2 here otherwise user can overwrite the schema. */\n retObj.schema.push(field.schema());\n });\n\n if (addUid) {\n retObj.schema.push({\n name: ROW_ID,\n type: FieldType.DIMENSION\n });\n }\n\n rowDiffsetIterator(rowDiffset, (i) => {\n retObj.data.push([]);\n const insertInd = retObj.data.length - 1;\n let start = 0;\n tmpDataArr.forEach((field, ii) => {\n retObj.data[insertInd][ii + start] = field.partialField.data[i];\n });\n if (addUid) {\n retObj.data[insertInd][tmpDataArr.length] = i;\n }\n // Creates an array of unique identifiers for each row\n retObj.uids.push(i);\n\n // If sorting is needed, expose the mapping\n // from the old index to the new index\n if (reqSorting) { retObj.data[insertInd].push(i); }\n });\n\n // Handles the sort functionality\n if (reqSorting) {\n sortData(retObj, sortingDetails);\n }\n\n if (options.columnWise) {\n const tmpData = Array(...Array(retObj.schema.length)).map(() => []);\n retObj.data.forEach((tuple) => {\n tuple.forEach((data, i) => {\n tmpData[i].push(data);\n });\n });\n retObj.data = tmpData;\n }\n\n return retObj;\n}\n","import DataModel from '../datamodel';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n\n/**\n * Performs the difference operation between two dm instances.\n *\n * @todo Fix the conflicts between union and difference terminology here.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after the difference operation.\n */\nexport function difference (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // The columns of both dm instances should match, otherwise return null\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n 
(dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n * @param {boolean} addData - If true, the tuple will be added to the data.\n */\n function prepareDataHelper(dm, fieldsObj, addData) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n if (addData) { data.push(tuple); }\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm2, dm2FieldStoreFieldObj, false);\n prepareDataHelper(dm1, dm1FieldStoreFieldObj, true);\n\n return new DataModel(data, schema, { name });\n}\n\n","import { isArray } from '../utils';\nimport InvalidAwareTypes from '../invalid-aware-types';\nimport { GROUP_BY_FUNCTIONS } from '../enums';\n\nconst { SUM, AVG, FIRST, LAST, COUNT, STD, MIN, MAX } = GROUP_BY_FUNCTIONS;\n\nfunction getFilteredValues(arr) {\n return arr.filter(item => !(item instanceof InvalidAwareTypes));\n}\n/**\n * Reducer function that returns the sum of all the values.\n *\n * @public\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the sum of the array.\n */\nfunction sum (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const filteredNumber = getFilteredValues(arr);\n const totalSum = filteredNumber.length ?\n filteredNumber.reduce((acc, curr) => acc + curr, 0)\n : InvalidAwareTypes.NULL;\n return totalSum;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that returns the average of all the values.\n *\n * @public\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the mean value of the array.\n */\nfunction avg (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n const totalSum = sum(arr);\n const len = arr.length || 1;\n return (Number.isNaN(totalSum) || totalSum instanceof InvalidAwareTypes) ?\n InvalidAwareTypes.NULL : totalSum / len;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the min value amongst all the values.\n *\n * @public\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the minimum value of the array.\n */\nfunction min (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? Math.min(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the max value amongst all the values.\n *\n * @public\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the maximum value of the array.\n */\nfunction max (arr) {\n if (isArray(arr) && !(arr[0] instanceof Array)) {\n // Filter out undefined, null and NaN values\n const filteredValues = getFilteredValues(arr);\n\n return (filteredValues.length) ? 
Math.max(...filteredValues) : InvalidAwareTypes.NULL;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Reducer function that gives the first value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the first value of the array.\n */\nfunction first (arr) {\n return arr[0];\n}\n\n/**\n * Reducer function that gives the last value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the last value of the array.\n */\nfunction last (arr) {\n return arr[arr.length - 1];\n}\n\n/**\n * Reducer function that gives the count value of the array.\n *\n * @public\n * @param {Array} arr - The input array.\n * @return {number} Returns the length of the array.\n */\nfunction count (arr) {\n if (isArray(arr)) {\n return arr.length;\n }\n return InvalidAwareTypes.NULL;\n}\n\n/**\n * Calculates the variance of the input array.\n *\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the variance of the input array.\n */\nfunction variance (arr) {\n let mean = avg(arr);\n return avg(arr.map(num => (num - mean) ** 2));\n}\n\n/**\n * Calculates the square root of the variance of the input array.\n *\n * @public\n * @param {Array.<number>} arr - The input array.\n * @return {number} Returns the square root of the variance.\n */\nfunction std (arr) {\n return Math.sqrt(variance(arr));\n}\n\n\nconst fnList = {\n [SUM]: sum,\n [AVG]: avg,\n [MIN]: min,\n [MAX]: max,\n [FIRST]: first,\n [LAST]: last,\n [COUNT]: count,\n [STD]: std\n};\n\nconst defaultReducerName = SUM;\n\nexport {\n defaultReducerName,\n sum as defReducer,\n fnList,\n};\n","import { defReducer, fnList } from '../operator';\n\n/**\n * A page-level store which registers and unregisters reducers for all the DataModel instances. There is only one\n * reducer store available in a page; all the DataModel instances receive the same instance of the reducer store. DataModel\n * provides a handful of {@link reducer | reducers} out of the box which can be used as reducer functions.\n *\n * @public\n * @namespace DataModel\n */\nclass ReducerStore {\n constructor () {\n this.store = new Map();\n this.store.set('defReducer', defReducer);\n\n Object.entries(fnList).forEach((key) => {\n this.store.set(key[0], key[1]);\n });\n }\n\n /**\n * Changes the `defaultReducer` globally. For all the fields which do not have `defAggFn` mentioned in the schema, the\n * value of `defaultReducer` is used for aggregation.\n *\n * @public\n * @param {string} [reducer='sum'] - The name of the default reducer. It picks up the definition from the store by doing a\n * name lookup. 
If no name is found then it takes `sum` as the default reducer.\n * @return {ReducerStore} Returns the instance of the singleton store in the page.\n */\n defaultReducer (...params) {\n if (!params.length) {\n return this.store.get('defReducer');\n }\n\n let reducer = params[0];\n\n if (typeof reducer === 'function') {\n this.store.set('defReducer', reducer);\n } else {\n reducer = String(reducer);\n if (Object.keys(fnList).indexOf(reducer) !== -1) {\n this.store.set('defReducer', fnList[reducer]);\n } else {\n throw new Error(`Reducer ${reducer} not found in registry`);\n }\n }\n return this;\n }\n\n /**\n *\n * Registers a {@link reducer | reducer}.\n * A {@link reducer | reducer} has to be registered before it is used.\n *\n * @example\n * // find the mean squared value of a given set\n * const reducerStore = DataModel.Reducers();\n *\n * reducerStore.register('meanSquared', (arr) => {\n * const squaredVal = arr.map(item => item * item);\n * let sum = 0;\n * for (let i = 0, l = squaredVal.length; i < l; i++) {\n * sum += squaredVal[i];\n * }\n *\n * return sum;\n * })\n *\n * // datamodel (dm) is already prepared with cars.json\n * const dm1 = dm.groupBy(['origin'], {\n * acceleration: 'meanSquared'\n * });\n *\n * @public\n *\n * @param {string} name formal name for a reducer. If the given name already exists in the store, it is overridden by the new\n * definition.\n * @param {Function} reducer definition of {@link reducer} function.\n *\n * @return {Function} function for unregistering the reducer.\n */\n register (name, reducer) {\n if (typeof reducer !== 'function') {\n throw new Error('Reducer should be a function');\n }\n\n name = String(name);\n this.store.set(name, reducer);\n\n return () => { this.__unregister(name); };\n }\n\n __unregister (name) {\n if (this.store.has(name)) {\n this.store.delete(name);\n }\n }\n\n resolve (name) {\n if (name instanceof Function) {\n return name;\n }\n return this.store.get(name);\n }\n}\n\nconst reducerStore = (function () {\n let store = null;\n\n function getStore () {\n if (store === null) {\n store = new ReducerStore();\n }\n return store;\n }\n return getStore();\n}());\n\nexport default reducerStore;\n","import { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport DataModel from '../export';\nimport reducerStore from '../utils/reducer-store';\nimport { defaultReducerName } from './group-by-function';\nimport { FieldType } from '../enums';\n\n/**\n * This function sanitizes the user-given fields and returns a common array structure of field\n * names.\n * @param {DataModel} dataModel the DataModel being operated on\n * @param {Array} fieldArr user input array of fields\n * @return {Array} array of field names\n */\nfunction getFieldArr (dataModel, fieldArr) {\n const retArr = [];\n const fieldStore = dataModel.getFieldspace();\n const dimensions = fieldStore.getDimension();\n\n Object.entries(dimensions).forEach(([key]) => {\n if (fieldArr && fieldArr.length) {\n if (fieldArr.indexOf(key) !== -1) {\n retArr.push(key);\n }\n } else {\n retArr.push(key);\n }\n });\n\n return retArr;\n}\n\n/**\n * This sanitizes the reducers provided by the user and creates an object of a common shape.\n * The user can also provide a function.\n * @param {DataModel} dataModel the DataModel to work on\n * @param {Object|function} [reducers={}] reducers provided by the user\n * @return {Object} object containing a reducer function for every measure\n */\nfunction getReducerObj (dataModel, reducers = {}) {\n const retObj = {};\n const fieldStore = 
dataModel.getFieldspace();\n const measures = fieldStore.getMeasure();\n const defReducer = reducerStore.defaultReducer();\n\n Object.keys(measures).forEach((measureName) => {\n if (typeof reducers[measureName] !== 'string') {\n reducers[measureName] = measures[measureName].defAggFn();\n }\n const reducerFn = reducerStore.resolve(reducers[measureName]);\n if (reducerFn) {\n retObj[measureName] = reducerFn;\n } else {\n retObj[measureName] = defReducer;\n reducers[measureName] = defaultReducerName;\n }\n });\n return retObj;\n}\n\n/**\n * Main function which performs the group-by operation: it reduces the measure values where the\n * dimension field values are common, according to the reducer functions provided.\n * @param {DataModel} dataModel the DataModel to work on\n * @param {Array} fieldArr fields according to which the group-by should be performed\n * @param {Object|Function} reducers the reducer functions\n * @param {DataModel} existingDataModel existing DataModel instance\n * @return {DataModel} new DataModel resulting from the group-by\n */\nfunction groupBy (dataModel, fieldArr, reducers, existingDataModel) {\n const sFieldArr = getFieldArr(dataModel, fieldArr);\n const reducerObj = getReducerObj(dataModel, reducers);\n const fieldStore = dataModel.getFieldspace();\n const fieldStoreObj = fieldStore.fieldsObj();\n const dbName = fieldStore.name;\n const dimensionArr = [];\n const measureArr = [];\n const schema = [];\n const hashMap = {};\n const data = [];\n let newDataModel;\n\n // Prepare the schema\n Object.entries(fieldStoreObj).forEach(([key, value]) => {\n if (sFieldArr.indexOf(key) !== -1 || reducerObj[key]) {\n schema.push(extend2({}, value.schema()));\n\n switch (value.schema().type) {\n case FieldType.MEASURE:\n measureArr.push(key);\n break;\n default:\n case FieldType.DIMENSION:\n dimensionArr.push(key);\n }\n }\n });\n // Prepare the data\n let rowCount = 0;\n rowDiffsetIterator(dataModel._rowDiffset, (i) => {\n let hash = '';\n dimensionArr.forEach((_) => {\n hash = `${hash}-${fieldStoreObj[_].partialField.data[i]}`;\n });\n if (hashMap[hash] === undefined) {\n hashMap[hash] = rowCount;\n data.push({});\n dimensionArr.forEach((_) => {\n data[rowCount][_] = fieldStoreObj[_].partialField.data[i];\n });\n measureArr.forEach((_) => {\n data[rowCount][_] = [fieldStoreObj[_].partialField.data[i]];\n });\n rowCount += 1;\n } else {\n measureArr.forEach((_) => {\n data[hashMap[hash]][_].push(fieldStoreObj[_].partialField.data[i]);\n });\n }\n });\n\n // reduction\n let cachedStore = {};\n let cloneProvider = () => dataModel.detachedRoot();\n data.forEach((row) => {\n const tuple = row;\n measureArr.forEach((_) => {\n tuple[_] = reducerObj[_](row[_], cloneProvider, cachedStore);\n });\n });\n if (existingDataModel) {\n existingDataModel.__calculateFieldspace();\n newDataModel = existingDataModel;\n }\n else {\n newDataModel = new DataModel(data, schema, { name: dbName });\n }\n return newDataModel;\n}\n\nexport { groupBy, getFieldArr, getReducerObj };\n","import { getCommonSchema } from './get-common-schema';\n\n/**\n * The filter function used in natural join.\n * It generates a function that will have the logic to join two\n * DataModel instances by the process of natural join.\n *\n * @param {DataModel} dm1 - The left DataModel instance.\n * @param {DataModel} dm2 - The right DataModel instance.\n * @return {Function} Returns a function that is used in cross-product operation.\n */\nexport function naturalJoinFilter (dm1, dm2) {\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = 
dm2.getFieldspace();\n // const dm1FieldStoreName = dm1FieldStore.name;\n // const dm2FieldStoreName = dm2FieldStore.name;\n const commonSchemaArr = getCommonSchema(dm1FieldStore, dm2FieldStore);\n\n return (dm1Fields, dm2Fields) => {\n let retainTuple = true;\n commonSchemaArr.forEach((fieldName) => {\n if (dm1Fields[fieldName].internalValue ===\n dm2Fields[fieldName].internalValue && retainTuple) {\n retainTuple = true;\n } else {\n retainTuple = false;\n }\n });\n return retainTuple;\n };\n}\n","import DataModel from '../export';\nimport { extend2 } from '../utils';\nimport { rowDiffsetIterator } from './row-diffset-iterator';\nimport { isArrEqual } from '../utils/helper';\n/**\n * Performs the union operation between two dm instances.\n *\n * @param {dm} dm1 - The first dm instance.\n * @param {dm} dm2 - The second dm instance.\n * @return {dm} Returns the newly created dm after union operation.\n */\nexport function union (dm1, dm2) {\n const hashTable = {};\n const schema = [];\n const schemaNameArr = [];\n const data = [];\n const dm1FieldStore = dm1.getFieldspace();\n const dm2FieldStore = dm2.getFieldspace();\n const dm1FieldStoreFieldObj = dm1FieldStore.fieldsObj();\n const dm2FieldStoreFieldObj = dm2FieldStore.fieldsObj();\n const name = `${dm1FieldStore.name} union ${dm2FieldStore.name}`;\n\n // For union the columns should match, otherwise return null\n if (!isArrEqual(dm1._colIdentifier.split(',').sort(), dm2._colIdentifier.split(',').sort())) {\n return null;\n }\n\n // Prepare the schema\n (dm1._colIdentifier.split(',')).forEach((fieldName) => {\n const field = dm1FieldStoreFieldObj[fieldName];\n schema.push(extend2({}, field.schema()));\n schemaNameArr.push(field.schema().name);\n });\n\n /**\n * The helper function to create the data.\n *\n * @param {dm} dm - The dm instance for which the data is inserted.\n * @param {Object} fieldsObj - The fieldStore object format.\n */\n function prepareDataHelper (dm, fieldsObj) {\n rowDiffsetIterator(dm._rowDiffset, (i) => {\n const tuple = {};\n let hashData = '';\n schemaNameArr.forEach((schemaName) => {\n const value = fieldsObj[schemaName].partialField.data[i];\n hashData += `-${value}`;\n tuple[schemaName] = value;\n });\n if (!hashTable[hashData]) {\n data.push(tuple);\n hashTable[hashData] = true;\n }\n });\n }\n\n // Prepare the data\n prepareDataHelper(dm1, dm1FieldStoreFieldObj);\n prepareDataHelper(dm2, dm2FieldStoreFieldObj);\n\n return new DataModel(data, schema, { name });\n}\n","import { crossProduct } from './cross-product';\nimport { JOINS } from '../constants';\nimport { union } from './union';\n\n\nexport function leftOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel1, dataModel2, filterFn, false, JOINS.LEFTOUTER);\n}\n\nexport function rightOuterJoin (dataModel1, dataModel2, filterFn) {\n return crossProduct(dataModel2, dataModel1, filterFn, false, JOINS.RIGHTOUTER);\n}\n\nexport function fullOuterJoin (dataModel1, dataModel2, filterFn) {\n return union(leftOuterJoin(dataModel1, dataModel2, filterFn), rightOuterJoin(dataModel1, dataModel2, filterFn));\n}\n","/**\n * Stores the full data and the metadata of a field. 
It provides\n * a single source of data from which the future Field\n * instance can get a subset of it with a rowDiffset config.\n *\n * @class\n * @public\n */\nexport default class PartialField {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {string} name - The name of the field.\n * @param {Array} data - The data array.\n * @param {Object} schema - The schema object of the corresponding field.\n * @param {FieldParser} parser - The parser instance corresponding to that field.\n */\n constructor (name, data, schema, parser) {\n this.name = name;\n this.schema = schema;\n this.parser = parser;\n this.data = this._sanitize(data);\n }\n\n /**\n * Sanitizes the field data.\n *\n * @private\n * @param {Array} data - The actual input data.\n * @return {Array} Returns the sanitized data.\n */\n _sanitize (data) {\n return data.map(datum => this.parser.parse(datum, { format: this.schema.format }));\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport PartialField from '../partial-field';\n\n/**\n * In {@link DataModel}, every tabular dataset consists of columns; a column is stored as a field.\n * Field contains all the data for a given column in an array.\n *\n * Each record consists of several fields; the fields of all records form the columns.\n * Examples of fields: name, gender, sex etc.\n *\n * In DataModel, each field can have multiple attributes which describe its data and behaviour.\n * A field can have two types of data: Measure and Dimension.\n *\n * A Dimension Field is the context on which the data is categorized and the Measure is the numerical value that\n * quantifies the data set.\n * In short, a dimension is the lens through which you are looking at your measure data.\n *\n * Refer to {@link Schema} to get info about possible field attributes.\n *\n * @public\n * @class\n */\nexport default class Field {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n this.partialField = partialField;\n this.rowDiffset = rowDiffset;\n }\n\n static parser() {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Generates the field type specific domain.\n *\n * @public\n * @abstract\n */\n domain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the field schema.\n *\n * @public\n * @return {Object} Returns the field schema.\n */\n schema () {\n return this.partialField.schema;\n }\n\n /**\n * Returns the name of the field.\n *\n * @public\n * @return {string} Returns the name of the field.\n */\n name () {\n return this.partialField.name;\n }\n\n /**\n * Returns the type of the field.\n *\n * @public\n * @return {string} Returns the type of the field.\n */\n type () {\n return this.partialField.schema.type;\n }\n\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return this.partialField.schema.subtype;\n }\n\n /**\n * Returns the description of the field.\n *\n * @public\n * @return {string} Returns the description of the field.\n */\n description () {\n return this.partialField.schema.description;\n }\n\n /**\n * Returns the display name of the field.\n *\n * @public\n * @return {string} Returns the display name of the field.\n */\n displayName () {\n return this.partialField.schema.displayName || 
this.partialField.schema.name;\n }\n\n /**\n * Returns the data associated with the field.\n *\n * @public\n * @return {Array} Returns the data.\n */\n data () {\n const data = [];\n rowDiffsetIterator(this.rowDiffset, (i) => {\n data.push(this.partialField.data[i]);\n });\n return data;\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @abstract\n */\n formattedData () {\n throw new Error('Not yet implemented');\n }\n\n static get BUILDER() {\n const builder = {\n _params: {},\n _context: this,\n fieldName(name) {\n this._params.name = name;\n return this;\n },\n schema(schema) {\n this._params.schema = schema;\n return this;\n },\n data(data) {\n this._params.data = data;\n return this;\n },\n partialField(partialField) {\n this._params.partialField = partialField;\n return this;\n },\n rowDiffset(rowDiffset) {\n this._params.rowDiffset = rowDiffset;\n return this;\n },\n build() {\n let partialField = null;\n if (this._params.partialField instanceof PartialField) {\n partialField = this._params.partialField;\n } else if (this._params.schema && this._params.data) {\n partialField = new PartialField(this._params.name,\n this._params.data,\n this._params.schema,\n this._context.parser());\n }\n else {\n throw new Error('Invalid Field parameters');\n }\n return new this._context(partialField, this._params.rowDiffset);\n }\n };\n return builder;\n }\n}\n","import Field from '../field';\n\n/**\n * Represents dimension field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Dimension extends Field {\n /**\n * Returns the domain for the dimension field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","import { formatNumber } from '../../utils';\nimport { defaultReducerName } from '../../operator/group-by-function';\nimport Field from '../field';\n\n/**\n * Represents measure field type.\n *\n * @public\n * @class\n * @extends Field\n */\nexport default class Measure extends Field {\n /**\n * Returns the domain for the measure field.\n *\n * @override\n * @public\n * @return {any} Returns the calculated domain.\n */\n domain () {\n if (!this._cachedDomain) {\n this._cachedDomain = this.calculateDataDomain();\n }\n return this._cachedDomain;\n }\n\n /**\n * Returns the unit of the measure field.\n *\n * @public\n * @return {string} Returns unit of the field.\n */\n unit () {\n return this.partialField.schema.unit;\n }\n\n /**\n * Returns the aggregation function name of the measure field.\n *\n * @public\n * @return {string} Returns aggregation function name of the field.\n */\n defAggFn () {\n return this.partialField.schema.defAggFn || defaultReducerName;\n }\n\n /**\n * Returns the number format of the measure field.\n *\n * @public\n * @return {Function} Returns number format of the field.\n */\n numberFormat () {\n const { numberFormat } = this.partialField.schema;\n return numberFormat instanceof Function ? 
numberFormat : formatNumber;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @abstract\n */\n calculateDataDomain () {\n throw new Error('Not yet implemented');\n }\n\n /**\n * Returns the formatted version of the underlying field data.\n *\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n return this.data();\n }\n}\n","/**\n * A interface to represent a parser which is responsible to parse the field.\n *\n * @public\n * @interface\n */\nexport default class FieldParser {\n /**\n * Parses a single value of a field and return the sanitized form.\n *\n * @public\n * @abstract\n */\n parse () {\n throw new Error('Not yet implemented');\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the categorical values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class CategoricalParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the stringified form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the stringified value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n result = String(val).trim();\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport { DimensionSubtype } from '../../enums';\nimport Dimension from '../dimension';\nimport CategoricalParser from '../parsers/categorical-parser';\n/**\n * Represents categorical field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Categorical extends Dimension {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return DimensionSubtype.CATEGORICAL;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n const hash = new Set();\n const domain = [];\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n if (!hash.has(datum)) {\n hash.add(datum);\n domain.push(datum);\n }\n });\n return domain;\n }\n\n static parser() {\n return new CategoricalParser();\n }\n}\n","import { DateTimeFormatter } from '../../../utils';\nimport FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the temporal values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class TemporalParser extends FieldParser {\n\n /**\n * Parses a single value of a field and returns the millisecond value.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {number} Returns the millisecond value.\n */\n parse (val, { format }) {\n let result;\n // check if invalid date value\n if (!this._dtf) {\n this._dtf = new DateTimeFormatter(format);\n }\n if (!InvalidAwareTypes.isInvalid(val)) {\n let nativeDate = this._dtf.getNativeDate(val);\n result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { rowDiffsetIterator } from '../operator/row-diffset-iterator';\nimport InvalidAwareTypes from '../invalid-aware-types';\n\nexport const calculateContinuousDomain = (data, rowDiffset) => {\n let min = Number.POSITIVE_INFINITY;\n let max = Number.NEGATIVE_INFINITY;\n\n // here don't use this.data() as the iteration will be occurred two times on same data.\n rowDiffsetIterator(rowDiffset, (i) => {\n const datum = data[i];\n if (datum instanceof InvalidAwareTypes) {\n return;\n }\n\n if (datum < min) {\n min = datum;\n }\n if (datum > max) {\n max = datum;\n }\n });\n\n return [min, max];\n};\n","import { rowDiffsetIterator } from '../../operator/row-diffset-iterator';\nimport Dimension from '../dimension';\nimport { DateTimeFormatter } from '../../utils';\nimport InvalidAwareTypes from '../../invalid-aware-types';\nimport TemporalParser from '../parsers/temporal-parser';\nimport { calculateContinuousDomain } from '../helper';\n\n/**\n * Represents temporal field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Temporal extends Dimension {\n /**\n * Initialize a new instance.\n *\n * @public\n * @param {PartialField} partialField - The partialField instance which holds the whole data.\n * @param {string} rowDiffset - The data subset definition.\n */\n constructor (partialField, rowDiffset) {\n super(partialField, rowDiffset);\n\n this._cachedMinDiff = null;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the unique values.\n */\n calculateDataDomain () {\n return calculateContinuousDomain(this.partialField.data, this.rowDiffset);\n }\n\n\n /**\n * Calculates the minimum consecutive difference from the associated field data.\n *\n * @public\n * @return {number} Returns the minimum consecutive diff in milliseconds.\n */\n minimumConsecutiveDifference () {\n if (this._cachedMinDiff) {\n return this._cachedMinDiff;\n }\n\n const sortedData = this.data().filter(item => !(item instanceof InvalidAwareTypes)).sort((a, b) => a - b);\n const arrLn = sortedData.length;\n let minDiff = Number.POSITIVE_INFINITY;\n let prevDatum;\n let nextDatum;\n let processedCount = 0;\n\n for (let i = 1; i < arrLn; i++) {\n prevDatum = sortedData[i - 1];\n nextDatum = sortedData[i];\n\n if (nextDatum === prevDatum) {\n continue;\n }\n\n minDiff = Math.min(minDiff, nextDatum - sortedData[i - 1]);\n processedCount++;\n }\n\n if (!processedCount) {\n minDiff = null;\n }\n this._cachedMinDiff = minDiff;\n\n return this._cachedMinDiff;\n }\n\n /**\n * Returns the format specified in the input schema while creating field.\n *\n * @public\n * @return {string} Returns the datetime format.\n */\n format () {\n return this.partialField.schema.format;\n }\n\n /**\n * Returns the formatted version of the underlying field data\n * If data is of type invalid or has missing format use the raw value\n * @public\n * @override\n * @return {Array} Returns the formatted data.\n */\n formattedData () {\n const data = [];\n const dataFormat = this.format();\n\n rowDiffsetIterator(this.rowDiffset, (i) => {\n const datum = this.partialField.data[i];\n // If value is of invalid type or format is missing\n if (InvalidAwareTypes.isInvalid(datum) || (!dataFormat && Number.isFinite(datum))) {\n // Use the invalid map value or the raw value\n const parsedDatum = InvalidAwareTypes.getInvalidType(datum) 
|| datum;\n data.push(parsedDatum);\n } else {\n data.push(DateTimeFormatter.formatAs(datum, dataFormat));\n }\n });\n return data;\n }\n\n static parser() {\n return new TemporalParser();\n }\n}\n\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the binned values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class BinnedParser extends FieldParser {\n /**\n * Parses a single binned value of a field and returns the sanitized value.\n *\n * @public\n * @param {string} val - The value of the field.\n * @return {string} Returns the sanitized value.\n */\n parse (val) {\n const regex = /^\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*-\\s*([+-]?\\d+(?:\\.\\d+)?)\\s*$/;\n val = String(val);\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let matched = val.match(regex);\n result = matched ? `${Number.parseFloat(matched[1])}-${Number.parseFloat(matched[2])}`\n : InvalidAwareTypes.NA;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import Dimension from '../dimension';\nimport BinnedParser from '../parsers/binned-parser';\n\n/**\n * Represents binned field subtype.\n *\n * @public\n * @class\n * @extends Dimension\n */\nexport default class Binned extends Dimension {\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the last and first values of bins config array.\n */\n calculateDataDomain () {\n const binsArr = this.partialField.schema.bins;\n return [binsArr[0], binsArr[binsArr.length - 1]];\n }\n\n /**\n * Returns the bins config provided while creating the field instance.\n *\n * @public\n * @return {Array} Returns the bins array config.\n */\n bins () {\n return this.partialField.schema.bins;\n }\n\n static parser() {\n return new BinnedParser();\n }\n}\n","import FieldParser from '../field-parser';\nimport InvalidAwareTypes from '../../../invalid-aware-types';\n\n/**\n * A FieldParser which parses the continuous values.\n *\n * @public\n * @class\n * @implements {FieldParser}\n */\nexport default class ContinuousParser extends FieldParser {\n /**\n * Parses a single value of a field and returns the number form.\n *\n * @public\n * @param {string|number} val - The value of the field.\n * @return {string} Returns the number value.\n */\n parse (val) {\n let result;\n // check if invalid date value\n if (!InvalidAwareTypes.isInvalid(val)) {\n let parsedVal = parseFloat(val, 10);\n result = Number.isNaN(parsedVal) ? 
InvalidAwareTypes.NA : parsedVal;\n } else {\n result = InvalidAwareTypes.getInvalidType(val);\n }\n return result;\n }\n}\n","import { MeasureSubtype } from '../../enums';\nimport Measure from '../measure';\nimport ContinuousParser from '../parsers/continuous-parser';\nimport { calculateContinuousDomain } from '../helper';\n\n/**\n * Represents continuous field subtype.\n *\n * @public\n * @class\n * @extends Measure\n */\nexport default class Continuous extends Measure {\n /**\n * Returns the subtype of the field.\n *\n * @public\n * @override\n * @return {string} Returns the subtype of the field.\n */\n subtype () {\n return MeasureSubtype.CONTINUOUS;\n }\n\n /**\n * Calculates the corresponding field domain.\n *\n * @public\n * @override\n * @return {Array} Returns the min and max values.\n */\n calculateDataDomain () {\n return calculateContinuousDomain(this.partialField.data, this.rowDiffset);\n }\n\n static parser() {\n return new ContinuousParser();\n }\n}\n","import Categorical from './categorical';\nimport Temporal from './temporal';\nimport Binned from './binned';\nimport Continuous from './continuous';\nimport { DimensionSubtype, MeasureSubtype } from '../enums';\n\n\nclass FieldTypeRegistry {\n constructor() {\n this._fieldType = new Map();\n }\n\n registerFieldType(subtype, dimension) {\n this._fieldType.set(subtype, dimension);\n return this;\n }\n\n has(type) {\n return this._fieldType.has(type);\n }\n\n get(type) {\n return this._fieldType.get(type);\n }\n}\n\nconst registerDefaultFields = (store) => {\n store\n .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical)\n .registerFieldType(DimensionSubtype.TEMPORAL, Temporal)\n .registerFieldType(DimensionSubtype.BINNED, Binned)\n .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous);\n};\n\nconst fieldRegistry = (function () {\n let store = null;\n function getStore () {\n store = new FieldTypeRegistry();\n registerDefaultFields(store);\n return store;\n }\n return store || getStore();\n}());\n\nexport default fieldRegistry;\n\n","import { FieldType, DimensionSubtype, MeasureSubtype } from './enums';\nimport { fieldRegistry } from './fields';\n\n/**\n * Creates a field instance according to the provided data and schema.\n *\n * @param {Array} data - The field data array.\n * @param {Object} schema - The field schema object.\n * @return {Field} Returns the newly created field instance.\n */\nfunction createUnitField(data, schema) {\n data = data || [];\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .fieldName(schema.name)\n .schema(schema)\n .data(data)\n .rowDiffset(`0-${data.length - 1}`)\n .build();\n}\n\n\n/**\n * Creates a field instance from partialField and rowDiffset.\n *\n * @param {PartialField} partialField - The corresponding partial field.\n * @param {string} rowDiffset - The data subset config.\n * @return {Field} Returns the newly created field instance.\n */\nexport function createUnitFieldFromPartial(partialField, rowDiffset) {\n const { schema } = partialField;\n\n if (fieldRegistry.has(schema.subtype)) {\n return fieldRegistry.get(schema.subtype)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n }\n return fieldRegistry\n .get(schema.type === FieldType.MEASURE ? 
MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL)\n .BUILDER\n .partialField(partialField)\n .rowDiffset(rowDiffset)\n .build();\n}\n\n/**\n * Creates the field instances with input data and schema.\n *\n * @param {Array} dataColumn - The data array for fields.\n * @param {Array} schema - The schema array for fields.\n * @param {Array} headers - The array of header names.\n * @return {Array.} Returns an array of newly created field instances.\n */\nexport function createFields(dataColumn, schema, headers) {\n const headersObj = {};\n\n if (!(headers && headers.length)) {\n headers = schema.map(item => item.name);\n }\n\n headers.forEach((header, i) => {\n headersObj[header] = i;\n });\n\n return schema.map(item => createUnitField(dataColumn[headersObj[item.name]], item));\n}\n","import { DataFormat } from './enums';\n\nexport default {\n dataFormat: DataFormat.AUTO\n};\n","/**\n * Interface for all data converters\n */\nexport default class DataConverter {\n constructor(type) {\n this._type = type;\n }\n\n get type() {\n return this._type;\n }\n\n convert() {\n throw new Error('Convert method not implemented.');\n }\n\n}\n","var EOL = {},\n EOF = {},\n QUOTE = 34,\n NEWLINE = 10,\n RETURN = 13;\n\nfunction objectConverter(columns) {\n return new Function(\"d\", \"return {\" + columns.map(function(name, i) {\n return JSON.stringify(name) + \": d[\" + i + \"] || \\\"\\\"\";\n }).join(\",\") + \"}\");\n}\n\nfunction customConverter(columns, f) {\n var object = objectConverter(columns);\n return function(row, i) {\n return f(object(row), i, columns);\n };\n}\n\n// Compute unique columns in order of discovery.\nfunction inferColumns(rows) {\n var columnSet = Object.create(null),\n columns = [];\n\n rows.forEach(function(row) {\n for (var column in row) {\n if (!(column in columnSet)) {\n columns.push(columnSet[column] = column);\n }\n }\n });\n\n return columns;\n}\n\nfunction pad(value, width) {\n var s = value + \"\", length = s.length;\n return length < width ? new Array(width - length + 1).join(0) + s : s;\n}\n\nfunction formatYear(year) {\n return year < 0 ? \"-\" + pad(-year, 6)\n : year > 9999 ? \"+\" + pad(year, 6)\n : pad(year, 4);\n}\n\nfunction formatDate(date) {\n var hours = date.getUTCHours(),\n minutes = date.getUTCMinutes(),\n seconds = date.getUTCSeconds(),\n milliseconds = date.getUTCMilliseconds();\n return isNaN(date) ? \"Invalid Date\"\n : formatYear(date.getUTCFullYear(), 4) + \"-\" + pad(date.getUTCMonth() + 1, 2) + \"-\" + pad(date.getUTCDate(), 2)\n + (milliseconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \".\" + pad(milliseconds, 3) + \"Z\"\n : seconds ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \":\" + pad(seconds, 2) + \"Z\"\n : minutes || hours ? \"T\" + pad(hours, 2) + \":\" + pad(minutes, 2) + \"Z\"\n : \"\");\n}\n\nexport default function(delimiter) {\n var reFormat = new RegExp(\"[\\\"\" + delimiter + \"\\n\\r]\"),\n DELIMITER = delimiter.charCodeAt(0);\n\n function parse(text, f) {\n var convert, columns, rows = parseRows(text, function(row, i) {\n if (convert) return convert(row, i - 1);\n columns = row, convert = f ? 
customConverter(row, f) : objectConverter(row);\n });\n rows.columns = columns || [];\n return rows;\n }\n\n function parseRows(text, f) {\n var rows = [], // output rows\n N = text.length,\n I = 0, // current character index\n n = 0, // current line number\n t, // current token\n eof = N <= 0, // current token followed by EOF?\n eol = false; // current token followed by EOL?\n\n // Strip the trailing newline.\n if (text.charCodeAt(N - 1) === NEWLINE) --N;\n if (text.charCodeAt(N - 1) === RETURN) --N;\n\n function token() {\n if (eof) return EOF;\n if (eol) return eol = false, EOL;\n\n // Unescape quotes.\n var i, j = I, c;\n if (text.charCodeAt(j) === QUOTE) {\n while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE);\n if ((i = I) >= N) eof = true;\n else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n return text.slice(j + 1, i - 1).replace(/\"\"/g, \"\\\"\");\n }\n\n // Find next delimiter or newline.\n while (I < N) {\n if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true;\n else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; }\n else if (c !== DELIMITER) continue;\n return text.slice(j, i);\n }\n\n // Return last token before EOF.\n return eof = true, text.slice(j, N);\n }\n\n while ((t = token()) !== EOF) {\n var row = [];\n while (t !== EOL && t !== EOF) row.push(t), t = token();\n if (f && (row = f(row, n++)) == null) continue;\n rows.push(row);\n }\n\n return rows;\n }\n\n function preformatBody(rows, columns) {\n return rows.map(function(row) {\n return columns.map(function(column) {\n return formatValue(row[column]);\n }).join(delimiter);\n });\n }\n\n function format(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join(\"\\n\");\n }\n\n function formatBody(rows, columns) {\n if (columns == null) columns = inferColumns(rows);\n return preformatBody(rows, columns).join(\"\\n\");\n }\n\n function formatRows(rows) {\n return rows.map(formatRow).join(\"\\n\");\n }\n\n function formatRow(row) {\n return row.map(formatValue).join(delimiter);\n }\n\n function formatValue(value) {\n return value == null ? \"\"\n : value instanceof Date ? formatDate(value)\n : reFormat.test(value += \"\") ? 
\"\\\"\" + value.replace(/\"/g, \"\\\"\\\"\") + \"\\\"\"\n : value;\n }\n\n return {\n parse: parse,\n parseRows: parseRows,\n format: format,\n formatBody: formatBody,\n formatRows: formatRows,\n formatRow: formatRow,\n formatValue: formatValue\n };\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in DSV array to a manageable internal format.\n *\n * @param {Array.} arr - A 2D array containing of the DSV data.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv data is header or not.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * [\"a\", \"b\", \"c\"],\n * [1, 2, 3],\n * [4, 5, 6],\n * [7, 8, 9]\n * ];\n */\nfunction DSVArr(arr, schema, options) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n const defaultOption = {\n firstRowHeader: true,\n };\n const schemaFields = schema.map(unitSchema => unitSchema.name);\n options = Object.assign({}, defaultOption, options);\n\n const columns = [];\n const push = columnMajor(columns);\n\n let headers = schemaFields;\n if (options.firstRowHeader) {\n // If header present then remove the first header row.\n // Do in-place mutation to save space.\n headers = arr.splice(0, 1)[0];\n }\n // create a map of the headers\n const headerMap = headers.reduce((acc, h, i) => (\n Object.assign(acc, { [h]: i })\n ), {});\n\n arr.forEach((fields) => {\n const field = [];\n schemaFields.forEach((schemaField) => {\n const headIndex = headerMap[schemaField];\n field.push(fields[headIndex]);\n });\n return push(...field);\n });\n return [schemaFields, columns];\n}\n\nexport default DSVArr;\n","import { dsvFormat as d3Dsv } from 'd3-dsv';\nimport DSVArr from './dsv-arr';\n\n/**\n * Parses and converts data formatted in DSV string to a manageable internal format.\n *\n * @todo Support to be given for https://tools.ietf.org/html/rfc4180.\n * @todo Sample implementation https://github.com/knrz/CSV.js/.\n *\n * @param {string} str - The input DSV string.\n * @param {Object} options - Option to control the behaviour of the parsing.\n * @param {boolean} [options.firstRowHeader=true] - Whether the first row of the dsv string data is header or not.\n * @param {string} [options.fieldSeparator=\",\"] - The separator of two consecutive field.\n * @return {Array} Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = `\n * a,b,c\n * 1,2,3\n * 4,5,6\n * 7,8,9\n * `\n */\nfunction DSVStr (str, schema, options) {\n const defaultOption = {\n firstRowHeader: true,\n fieldSeparator: ','\n };\n options = Object.assign({}, defaultOption, options);\n\n const dsv = d3Dsv(options.fieldSeparator);\n return DSVArr(dsv.parseRows(str), schema, options);\n}\n\nexport default DSVStr;\n","import DataConverter from '../model/dataConverter';\nimport DSVStr from '../utils/dsv-str';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVStringConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_STR);\n }\n\n convert(data, schema, options) {\n return DSVStr(data, schema, options);\n }\n}\n","import { columnMajor } from '../../utils';\n\n/**\n * Parses and converts data formatted in JSON to a manageable internal format.\n *\n * @param {Array.} arr - The input data formatted in JSON.\n * @return {Array.} 
Returns an array of headers and column major data.\n * @example\n *\n * // Sample input data:\n * const data = [\n * {\n * \"a\": 1,\n * \"b\": 2,\n * \"c\": 3\n * },\n * {\n * \"a\": 4,\n * \"b\": 5,\n * \"c\": 6\n * },\n * {\n * \"a\": 7,\n * \"b\": 8,\n * \"c\": 9\n * }\n * ];\n */\nfunction FlatJSON (arr, schema) {\n if (!Array.isArray(schema)) {\n throw new Error('Schema missing or is in an unsupported format');\n }\n\n const header = {};\n let i = 0;\n let insertionIndex;\n const columns = [];\n const push = columnMajor(columns);\n const schemaFieldsName = schema.map(unitSchema => unitSchema.name);\n\n arr.forEach((item) => {\n const fields = [];\n schemaFieldsName.forEach((unitSchema) => {\n if (unitSchema in header) {\n insertionIndex = header[unitSchema];\n } else {\n header[unitSchema] = i++;\n insertionIndex = i - 1;\n }\n fields[insertionIndex] = item[unitSchema];\n });\n push(...fields);\n });\n\n return [Object.keys(header), columns];\n}\n\nexport default FlatJSON;\n","import DataConverter from '../model/dataConverter';\nimport FlatJSON from '../utils/flat-json';\nimport DataFormat from '../../enums/data-format';\n\nexport default class JSONConverter extends DataConverter {\n constructor() {\n super(DataFormat.FLAT_JSON);\n }\n\n convert(data, schema, options) {\n return FlatJSON(data, schema, options);\n }\n}\n","import DataConverter from '../model/dataConverter';\nimport DSVArr from '../utils/dsv-arr';\nimport DataFormat from '../../enums/data-format';\n\nexport default class DSVArrayConverter extends DataConverter {\n constructor() {\n super(DataFormat.DSV_ARR);\n }\n\n convert(data, schema, options) {\n return DSVArr(data, schema, options);\n }\n}\n","import FlatJSON from './flat-json';\nimport DSVArr from './dsv-arr';\nimport DSVStr from './dsv-str';\nimport { detectDataFormat } from '../../utils';\n\n/**\n * Parses the input data and detect the format automatically.\n *\n * @param {string|Array} data - The input data.\n * @param {Object} options - An optional config specific to data format.\n * @return {Array.} Returns an array of headers and column major data.\n */\nfunction Auto (data, schema, options) {\n const converters = { FlatJSON, DSVStr, DSVArr };\n const dataFormat = detectDataFormat(data);\n\n if (!dataFormat) {\n throw new Error('Couldn\\'t detect the data format');\n }\n\n return converters[dataFormat](data, schema, options);\n}\n\nexport default Auto;\n","import DataConverter from '../model/dataConverter';\nimport AUTO from '../utils/auto-resolver';\nimport DataFormat from '../../enums/data-format';\n\nexport default class AutoDataConverter extends DataConverter {\n constructor() {\n super(DataFormat.AUTO);\n }\n\n convert(data, schema, options) {\n return AUTO(data, schema, options);\n }\n}\n","import DataConverter from './model/dataConverter';\nimport { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters';\n\nclass DataConverterStore {\n constructor() {\n this.store = new Map();\n this.converters(this._getDefaultConverters());\n }\n\n _getDefaultConverters() {\n return [\n new DSVStringConverter(),\n new DSVArrayConverter(),\n new JSONConverter(),\n new AutoDataConverter()\n ];\n }\n\n /**\n * Sets the given converters in the store and returns the store\n * @param {Array} converters : contains array of converter instance\n * @return { Map }\n */\n converters(converters = []) {\n converters.forEach(converter => this.store.set(converter.type, converter));\n return this.store;\n }\n\n /**\n * Registers a 
Converter of type DataConverter\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n register(converter) {\n if (converter instanceof DataConverter) {\n this.store.set(converter.type, converter);\n return this;\n }\n return null;\n }\n\n /**\n * Removes a converter from the store\n * @param {DataConverter} converter : converter Instance\n * @returns self\n */\n\n unregister(converter) {\n this.store.delete(converter.type);\n return this;\n }\n\n get(name) {\n if (this.store.has(name)) {\n return this.store.get(name);\n }\n return null;\n }\n\n}\n\nconst converterStore = (function () {\n let store = null;\n\n function getStore () {\n store = new DataConverterStore();\n return store;\n }\n return store || getStore();\n}());\n\nexport default converterStore;\n","import { FieldType, FilteringMode, DimensionSubtype, MeasureSubtype, DataFormat } from './enums';\nimport fieldStore from './field-store';\nimport Value from './value';\nimport {\n rowDiffsetIterator\n} from './operator';\nimport { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants';\nimport { createFields, createUnitFieldFromPartial } from './field-creator';\nimport defaultConfig from './default-config';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\nimport { extend2, detectDataFormat } from './utils';\n\n/**\n * Prepares the selection data.\n */\nfunction prepareSelectionData (fields, formattedData, rawData, i) {\n const resp = {};\n\n for (const [key, field] of fields.entries()) {\n resp[field.name()] = new Value(formattedData[key][i], rawData[key][i], field);\n }\n return resp;\n}\n\nexport function prepareJoinData (fields) {\n const resp = {};\n\n for (const key in fields) {\n resp[key] = new Value(fields[key].formattedValue, fields[key].rawValue, key);\n }\n return resp;\n}\n\nexport const updateFields = ([rowDiffset, colIdentifier], partialFieldspace, fieldStoreName) => {\n let collID = colIdentifier.length ? 
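// Editor's sketch (an annotation, not part of the built bundle) of how the pieces above
// fit together. DSVStr/DSVArr return [headers, columnMajorData], with cell values kept
// as strings at this stage:
// DSVStr('a,b,c\n1,2,3\n4,5,6', [{ name: 'a' }, { name: 'b' }, { name: 'c' }]);
// // -> [['a', 'b', 'c'], [['1', '4'], ['2', '5'], ['3', '6']]]
// A custom format can be registered with the store; the name 'TSVStr' and the assumption
// that the DataConverter base class stores its constructor argument as `type` are
// hypothetical here:
// class TSVStringConverter extends DataConverter {
//     constructor () { super('TSVStr'); }
//     convert (data, schema, options) {
//         return DSVStr(data, schema, Object.assign({}, options, { fieldSeparator: '\t' }));
//     }
// }
// converterStore.register(new TSVStringConverter());
// converterStore.get('TSVStr'); // -> TSVStringConverter instance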
colIdentifier.split(',') : [];\n let partialFieldMap = partialFieldspace.fieldsObj();\n let newFields = collID.map(coll => createUnitFieldFromPartial(partialFieldMap[coll].partialField, rowDiffset));\n return fieldStore.createNamespace(newFields, fieldStoreName);\n};\n\nexport const persistCurrentDerivation = (model, operation, config = {}, criteriaFn) => {\n if (operation === DM_DERIVATIVES.COMPOSE) {\n model._derivation.length = 0;\n model._derivation.push(...criteriaFn);\n } else {\n model._derivation.push({\n op: operation,\n meta: config,\n criteria: criteriaFn\n });\n }\n};\nexport const persistAncestorDerivation = (sourceDm, newDm) => {\n newDm._ancestorDerivation.push(...sourceDm._ancestorDerivation, ...sourceDm._derivation);\n};\n\nexport const persistDerivations = (sourceDm, model, operation, config = {}, criteriaFn) => {\n persistCurrentDerivation(model, operation, config, criteriaFn);\n persistAncestorDerivation(sourceDm, model);\n};\n\nconst selectModeMap = {\n [FilteringMode.NORMAL]: {\n diffIndex: ['rowDiffset'],\n calcDiff: [true, false]\n },\n [FilteringMode.INVERSE]: {\n diffIndex: ['rejectRowDiffset'],\n calcDiff: [false, true]\n },\n [FilteringMode.ALL]: {\n diffIndex: ['rowDiffset', 'rejectRowDiffset'],\n calcDiff: [true, true]\n }\n};\n\nconst generateRowDiffset = (rowDiffset, i, lastInsertedValue) => {\n if (lastInsertedValue !== -1 && i === (lastInsertedValue + 1)) {\n const li = rowDiffset.length - 1;\n\n rowDiffset[li] = `${rowDiffset[li].split('-')[0]}-${i}`;\n } else {\n rowDiffset.push(`${i}`);\n }\n};\n\nexport const selectRowDiffsetIterator = (rowDiffset, checker, mode) => {\n let lastInsertedValueSel = -1;\n let lastInsertedValueRej = -1;\n const newRowDiffSet = [];\n const rejRowDiffSet = [];\n\n const [shouldSelect, shouldReject] = selectModeMap[mode].calcDiff;\n\n rowDiffsetIterator(rowDiffset, (i) => {\n const checkerResult = checker(i);\n checkerResult && shouldSelect && generateRowDiffset(newRowDiffSet, i, lastInsertedValueSel);\n !checkerResult && shouldReject && generateRowDiffset(rejRowDiffSet, i, lastInsertedValueRej);\n });\n return {\n rowDiffset: newRowDiffSet.join(','),\n rejectRowDiffset: rejRowDiffSet.join(',')\n };\n};\n\n\nexport const rowSplitDiffsetIterator = (rowDiffset, checker, mode, dimensionArr, fieldStoreObj) => {\n let lastInsertedValue = {};\n const splitRowDiffset = {};\n const dimensionMap = {};\n\n rowDiffsetIterator(rowDiffset, (i) => {\n if (checker(i)) {\n let hash = '';\n\n let dimensionSet = { keys: {} };\n\n dimensionArr.forEach((_) => {\n const data = fieldStoreObj[_].partialField.data[i];\n hash = `${hash}-${data}`;\n dimensionSet.keys[_] = data;\n });\n\n if (splitRowDiffset[hash] === undefined) {\n splitRowDiffset[hash] = [];\n lastInsertedValue[hash] = -1;\n dimensionMap[hash] = dimensionSet;\n }\n\n generateRowDiffset(splitRowDiffset[hash], i, lastInsertedValue[hash]);\n lastInsertedValue[hash] = i;\n }\n });\n\n return {\n splitRowDiffset,\n dimensionMap\n };\n};\n\n\nexport const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => {\n let cachedStore = {};\n let cloneProvider = () => sourceDm.detachedRoot();\n const { mode } = config;\n const rowDiffset = clonedDm._rowDiffset;\n const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects;\n\n const selectorHelperFn = index => selectFn(\n cachedValueObjects[index],\n index,\n cloneProvider,\n cachedStore\n );\n\n return iterator(rowDiffset, selectorHelperFn, mode);\n};\n\nexport const cloneWithAllFields = (model) => {\n const clonedDm 
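// Editor's sketch of the rowDiffset encoding used throughout the helpers above:
// consecutive row indices are compressed into 'start-end' ranges by generateRowDiffset.
// const diffset = [];
// [0, 1, 2, 5].forEach((i, idx, arr) => generateRowDiffset(diffset, i, idx ? arr[idx - 1] : -1));
// diffset.join(','); // -> '0-2,5'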
= model.clone(false);\n const partialFieldspace = model.getPartialFieldspace();\n clonedDm._colIdentifier = partialFieldspace.fields.map(f => f.name()).join(',');\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n\n return clonedDm;\n};\n\nconst getKey = (arr, data, fn, rowId) => {\n let key = fn(arr, data, 0, rowId);\n\n for (let i = 1, len = arr.length; i < len; i++) {\n key = `${key},${fn(arr, data, i, rowId)}`;\n }\n return key;\n};\n\nconst keyFn = (arr, fields, idx, rowId) => {\n const field = arr[idx];\n const val = field === ROW_ID ? rowId : fields[field].internalValue;\n return val;\n};\n\nconst domainChecker = (val, domain) => {\n const domainArr = domain[0] instanceof Array ? domain : [domain];\n return domainArr.some(dom => val >= dom[0] && val <= dom[1]);\n};\n\nconst boundsChecker = {\n [MeasureSubtype.CONTINUOUS]: domainChecker,\n [DimensionSubtype.TEMPORAL]: domainChecker\n};\n\nconst isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain);\n\nexport const filterPropagationModel = (model, propModels, config = {}) => {\n let fns = [];\n const operation = config.operation || LOGICAL_OPERATORS.AND;\n const { filterByDim = true, filterByMeasure = false, clone = true } = config;\n const clonedModel = clone ? cloneWithAllFields(model) : model;\n const modelFieldsConfig = clonedModel.getFieldsConfig();\n\n if (!propModels.length) {\n fns = [() => false];\n } else {\n fns = propModels.map(propModel => (({ criteria = {} }) => {\n const { identifiers = [[], []], range } = criteria;\n let [fieldNames = [], values = []] = identifiers;\n const indices = fieldNames.reduce((map, name, i) => {\n map[name] = i;\n return map;\n }, {});\n fieldNames = fieldNames.filter(field => (field in modelFieldsConfig &&\n modelFieldsConfig[field].def.type === FieldType.DIMENSION) || field === ROW_ID);\n const dLen = fieldNames.length;\n const valuesMap = {};\n\n if (dLen) {\n for (let i = 1, len = identifiers.length; i < len; i++) {\n const row = identifiers[i];\n const key = `${fieldNames.map((field) => {\n const idx = indices[field];\n return row[idx];\n })}`;\n valuesMap[key] = 1;\n }\n }\n let rangeKeys = Object.keys(range || {}).filter(field => field in modelFieldsConfig);\n const hasData = values.length || rangeKeys.length;\n\n if (!filterByMeasure) {\n rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.MEASURE);\n }\n\n if (!filterByDim) {\n rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.DIMENSION);\n }\n\n return hasData ? (fields, i) => {\n let present = true;\n if (filterByDim) {\n present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true;\n }\n\n return rangeKeys.every((field) => {\n const val = fields[field].internalValue;\n return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype);\n }) && present;\n } : () => false;\n })(propModel));\n }\n\n let filteredModel;\n if (operation === LOGICAL_OPERATORS.AND) {\n filteredModel = clonedModel.select((fields, i) => fns.every(fn => fn(fields, i)), {\n saveChild: false\n });\n } else {\n filteredModel = clonedModel.select((fields, i) => fns.some(fn => fn(fields, i)), {\n saveChild: false\n });\n }\n\n return filteredModel;\n};\n\n\nexport const splitWithSelect = (sourceDm, dimensionArr, reducerFn = val => val, config) => {\n const {\n saveChild,\n } = config;\n const fieldStoreObj = sourceDm.getFieldspace().fieldsObj();\n\n const {\n splitRowDiffset,\n dimensionMap\n } = selectHelper(\n sourceDm.clone(saveChild),\n reducerFn,\n config,\n sourceDm,\n (...params) => rowSplitDiffsetIterator(...params, dimensionArr, fieldStoreObj)\n );\n\n const clonedDMs = [];\n Object.keys(splitRowDiffset).sort().forEach((e) => {\n if (splitRowDiffset[e]) {\n const cloned = sourceDm.clone(saveChild);\n const derivation = dimensionMap[e];\n cloned._rowDiffset = splitRowDiffset[e].join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n const derivationFormula = fields => dimensionArr.every(_ => fields[_].internalValue === derivation.keys[_]);\n // Store reference to child model and selector function\n if (saveChild) {\n persistDerivations(sourceDm, cloned, DM_DERIVATIVES.SELECT, config, derivationFormula);\n }\n cloned._derivation[cloned._derivation.length - 1].meta = dimensionMap[e];\n\n clonedDMs.push(cloned);\n }\n });\n\n\n return clonedDMs;\n};\nexport const addDiffsetToClonedDm = (clonedDm, rowDiffset, sourceDm, selectConfig, selectFn) => {\n clonedDm._rowDiffset = rowDiffset;\n clonedDm.__calculateFieldspace().calculateFieldsConfig();\n persistDerivations(\n sourceDm,\n clonedDm,\n DM_DERIVATIVES.SELECT,\n { config: selectConfig },\n selectFn\n );\n};\n\n\nexport const cloneWithSelect = (sourceDm, selectFn, selectConfig, cloneConfig) => {\n let extraCloneDm = {};\n\n let { mode } = selectConfig;\n\n const cloned = sourceDm.clone(cloneConfig.saveChild);\n const setOfRowDiffsets = selectHelper(\n cloned,\n selectFn,\n selectConfig,\n sourceDm,\n selectRowDiffsetIterator\n );\n const diffIndex = selectModeMap[mode].diffIndex;\n\n addDiffsetToClonedDm(cloned, setOfRowDiffsets[diffIndex[0]], sourceDm, selectConfig, selectFn);\n\n if (diffIndex.length > 1) {\n extraCloneDm = sourceDm.clone(cloneConfig.saveChild);\n addDiffsetToClonedDm(extraCloneDm, setOfRowDiffsets[diffIndex[1]], sourceDm, selectConfig, selectFn);\n return [cloned, extraCloneDm];\n }\n\n return cloned;\n};\n\nexport const cloneWithProject = (sourceDm, projField, config, allFields) => {\n const cloned = sourceDm.clone(config.saveChild);\n let projectionSet = projField;\n if (config.mode === FilteringMode.INVERSE) {\n projectionSet = allFields.filter(fieldName => projField.indexOf(fieldName) === -1);\n }\n // cloned._colIdentifier = sourceDm._colIdentifier.split(',')\n // .filter(coll => projectionSet.indexOf(coll) !== -1).join();\n cloned._colIdentifier = projectionSet.join(',');\n cloned.__calculateFieldspace().calculateFieldsConfig();\n\n persistDerivations(\n sourceDm,\n cloned,\n DM_DERIVATIVES.PROJECT,\n { projField, config, actualProjField: projectionSet },\n null\n );\n\n return cloned;\n};\n\n\nexport const splitWithProject = 
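// Editor's note: cloneWithSelect above backs DataModel#select. With FilteringMode.ALL,
// selectModeMap yields two diffsets, so callers receive a [selected, rejected] pair:
// const [usaDm, restDm] = dm.select(fields => fields.Origin.value === 'USA',
//     { mode: FilteringMode.ALL }); // dm is a hypothetical model with an Origin field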
(sourceDm, projFieldSet, config, allFields) =>\n projFieldSet.map(projFields =>\n cloneWithProject(sourceDm, projFields, config, allFields));\n\nexport const sanitizeUnitSchema = (unitSchema) => {\n // Do deep clone of the unit schema as the user might change it later.\n unitSchema = extend2({}, unitSchema);\n if (!unitSchema.type) {\n unitSchema.type = FieldType.DIMENSION;\n }\n\n if (!unitSchema.subtype) {\n switch (unitSchema.type) {\n case FieldType.MEASURE:\n unitSchema.subtype = MeasureSubtype.CONTINUOUS;\n break;\n default:\n case FieldType.DIMENSION:\n unitSchema.subtype = DimensionSubtype.CATEGORICAL;\n break;\n }\n }\n\n return unitSchema;\n};\n\nexport const validateUnitSchema = (unitSchema) => {\n const { type, subtype, name } = unitSchema;\n if (type === FieldType.DIMENSION || type === FieldType.MEASURE) {\n if (!fieldRegistry.has(subtype)) {\n throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`);\n }\n } else {\n throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`);\n }\n};\n\nexport const sanitizeAndValidateSchema = schema => schema.map((unitSchema) => {\n unitSchema = sanitizeUnitSchema(unitSchema);\n validateUnitSchema(unitSchema);\n return unitSchema;\n});\n\nexport const resolveFieldName = (schema, dataHeader) => {\n schema.forEach((unitSchema) => {\n const fieldNameAs = unitSchema.as;\n if (!fieldNameAs) { return; }\n\n const idx = dataHeader.indexOf(unitSchema.name);\n dataHeader[idx] = fieldNameAs;\n unitSchema.name = fieldNameAs;\n delete unitSchema.as;\n });\n};\n\nexport const updateData = (relation, data, schema, options) => {\n schema = sanitizeAndValidateSchema(schema);\n options = Object.assign(Object.assign({}, defaultConfig), options);\n const converter = converterStore.get(options.dataFormat);\n\n\n if (!converter) {\n throw new Error(`No converter function found for ${options.dataFormat} format`);\n }\n\n const [header, formattedData] = converter.convert(data, schema, options);\n resolveFieldName(schema, header);\n const fieldArr = createFields(formattedData, schema, header);\n\n // This will create a new fieldStore with the fields\n const nameSpace = fieldStore.createNamespace(fieldArr, options.name);\n relation._partialFieldspace = nameSpace;\n\n // If data is provided create the default colIdentifier and rowDiffset\n relation._rowDiffset = formattedData.length && formattedData[0].length ? `0-${formattedData[0].length - 1}` : '';\n\n // This stores the value objects which is passed to the filter method when selection operation is done.\n const valueObjects = [];\n const { fields } = nameSpace;\n const rawFieldsData = fields.map(field => field.data());\n const formattedFieldsData = fields.map(field => field.formattedData());\n rowDiffsetIterator(relation._rowDiffset, (i) => {\n valueObjects[i] = prepareSelectionData(fields, formattedFieldsData, rawFieldsData, i);\n });\n nameSpace._cachedValueObjects = valueObjects;\n\n relation._colIdentifier = (schema.map(_ => _.name)).join();\n relation._dataFormat = options.dataFormat === DataFormat.AUTO ? 
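// Editor's sketch of the defaulting rules in sanitizeUnitSchema above:
// sanitizeUnitSchema({ name: 'Origin' })
// // -> { name: 'Origin', type: 'dimension', subtype: 'categorical' }
// sanitizeUnitSchema({ name: 'Horsepower', type: 'measure' })
// // -> { name: 'Horsepower', type: 'measure', subtype: 'continuous' }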
detectDataFormat(data) : options.dataFormat;\n return relation;\n};\n\nexport const fieldInSchema = (schema, field) => {\n let i = 0;\n\n for (; i < schema.length; ++i) {\n if (field === schema[i].name) {\n return {\n name: field,\n type: schema[i].subtype || schema[i].type,\n index: i,\n };\n }\n }\n return null;\n};\n\nexport const getDerivationArguments = (derivation) => {\n let params = [];\n let operation;\n operation = derivation.op;\n switch (operation) {\n case DM_DERIVATIVES.SELECT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.PROJECT:\n params = [derivation.meta.actualProjField];\n break;\n case DM_DERIVATIVES.SORT:\n params = [derivation.criteria];\n break;\n case DM_DERIVATIVES.GROUPBY:\n operation = 'groupBy';\n params = [derivation.meta.groupByString.split(','), derivation.criteria];\n break;\n default:\n operation = null;\n }\n\n return {\n operation,\n params\n };\n};\n\nconst applyExistingOperationOnModel = (propModel, dataModel) => {\n const derivations = dataModel.getDerivations();\n let selectionModel = propModel;\n\n derivations.forEach((derivation) => {\n if (!derivation) {\n return;\n }\n\n const { operation, params } = getDerivationArguments(derivation);\n if (operation) {\n selectionModel = selectionModel[operation](...params, {\n saveChild: false\n });\n }\n });\n\n return selectionModel;\n};\n\nconst getFilteredModel = (propModel, path) => {\n for (let i = 0, len = path.length; i < len; i++) {\n const model = path[i];\n propModel = applyExistingOperationOnModel(propModel, model);\n }\n return propModel;\n};\n\nconst propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => {\n const excludeModels = propModelInf.excludeModels || [];\n const criterias = propModelInf.criteria;\n\n const propagate = excludeModels.length ? 
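// Editor's note: getDerivationArguments above converts a persisted derivation record
// back into a replayable invocation, which applyExistingOperationOnModel applies; e.g.
// a DM_DERIVATIVES.SELECT record { op, criteria } replays as
// model.select(criteria, { saveChild: false }), and a GROUPBY record replays as
// model.groupBy(meta.groupByString.split(','), criteria).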
excludeModels.indexOf(dataModel) === -1 : true;\n\n propagate && dataModel.handlePropagation(propModel, config);\n\n const children = dataModel._children;\n children.forEach((child) => {\n const matchingCriteria = criterias.filter(val => val.groupedModel === child);\n let selectionModel = applyExistingOperationOnModel(propModel, child);\n\n if (matchingCriteria.length) {\n selectionModel = filterPropagationModel(selectionModel, matchingCriteria, {\n filterByDim: false,\n filterByMeasure: true,\n clone: false\n });\n }\n propagateIdentifiers(child, selectionModel, config, propModelInf);\n });\n};\n\nexport const getRootGroupByModel = (model) => {\n while (model._parent && model._derivation.find(d => d.op !== DM_DERIVATIVES.GROUPBY)) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getRootDataModel = (model) => {\n while (model._parent) {\n model = model._parent;\n }\n return model;\n};\n\nexport const getPathToRootModel = (model, path = []) => {\n while (model._parent) {\n path.push(model);\n model = model._parent;\n }\n return path;\n};\n\nexport const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => {\n let criteria;\n const { propagationNameSpace, propagateToSource } = propagationInf;\n const propagationSourceId = propagationInf.sourceId;\n const filterFn = (entry) => {\n const filter = config.filterFn || (() => true);\n return filter(entry, config);\n };\n\n const addGroupedModel = ({ config: conf, model }) => {\n const { criteria: crit } = conf;\n let groupedModel;\n\n if (crit !== null && crit.fields.some(d => d.type === FieldType.MEASURE)) {\n groupedModel = getRootGroupByModel(model);\n }\n return Object.assign({}, conf, {\n groupedModel\n });\n };\n\n let criterias = [];\n\n if (identifiers === null) {\n criterias = [{\n criteria: []\n }];\n criteria = [];\n } else {\n let actionCriterias = Object.values(propagationNameSpace.mutableActions);\n if (propagateToSource !== false) {\n actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId);\n }\n\n const filteredCriteria = actionCriterias.filter(filterFn);\n\n const excludeModels = [];\n\n if (propagateToSource !== false) {\n const sourceActionCriterias = Object.values(propagationNameSpace.mutableActions);\n\n sourceActionCriterias.forEach((actionInf) => {\n const actionConf = actionInf.config;\n if (actionConf.applyOnSource === false && actionConf.action === config.action &&\n actionConf.sourceId !== propagationSourceId) {\n excludeModels.push(actionInf.model);\n criteria = sourceActionCriterias.filter(d => d !== actionInf).map(addGroupedModel);\n criteria.length && criterias.push({\n criteria,\n models: actionInf.model,\n path: getPathToRootModel(actionInf.model)\n });\n }\n });\n }\n\n\n criteria = [].concat(...[...filteredCriteria.map(addGroupedModel), {\n criteria: identifiers,\n groupedModel: identifiers !== null && identifiers.fields.some(d => d.type === FieldType.MEASURE) ?\n getRootGroupByModel(propagationInf.propagationSource) : null\n }]).filter(d => d !== null);\n criterias.push({\n criteria,\n excludeModels: [...excludeModels, ...config.excludeModels || []]\n });\n }\n\n const rootModel = rootModels.model;\n\n const propConfig = Object.assign({\n sourceIdentifiers: identifiers,\n propagationSourceId\n }, config);\n\n criterias.forEach((inf) => {\n const { criteria: crit } = inf;\n const propagationModel = filterPropagationModel(rootModel, crit, {\n filterByMeasure: !!crit.find(d => d.groupedModel === rootModel)\n });\n const path = 
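// Editor's note: the propagation namespace used below keys each registered action as
// `${config.action}-${config.sourceId}` under mutableActions or immutableActions (see
// addToPropNamespace); registering with criteria === null removes the entry instead.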
inf.path;\n\n if (path) {\n const filteredModel = getFilteredModel(propagationModel, path.reverse());\n inf.models.handlePropagation(filteredModel, propConfig);\n } else {\n propagateIdentifiers(rootModel, propagationModel, propConfig, {\n excludeModels: inf.excludeModels,\n criteria: crit\n });\n }\n });\n};\n\nexport const propagateImmutableActions = (propagationNameSpace, rootModel, propagationInf) => {\n const immutableActions = propagationNameSpace.immutableActions;\n\n for (const action in immutableActions) {\n const actionInf = immutableActions[action];\n const actionConf = actionInf.config;\n const propagationSourceId = propagationInf.config.sourceId;\n const filterImmutableAction = propagationInf.propConfig.filterImmutableAction ?\n propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true;\n if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) {\n const criteriaModel = actionConf.criteria;\n propagateToAllDataModels(criteriaModel, {\n model: rootModel,\n groupByModel: getRootGroupByModel(actionInf.model)\n }, {\n propagationNameSpace,\n propagateToSource: false,\n sourceId: propagationSourceId,\n propagationSource: actionInf.model\n }, actionConf);\n }\n }\n};\n\nexport const addToPropNamespace = (propagationNameSpace, config = {}, model) => {\n let sourceNamespace;\n const isMutableAction = config.isMutableAction;\n const criteria = config.criteria;\n const key = `${config.action}-${config.sourceId}`;\n\n if (isMutableAction) {\n sourceNamespace = propagationNameSpace.mutableActions;\n } else {\n sourceNamespace = propagationNameSpace.immutableActions;\n }\n\n if (criteria === null) {\n delete sourceNamespace[key];\n } else {\n sourceNamespace[key] = {\n model,\n config\n };\n }\n\n return this;\n};\n\n\nexport const getNormalizedProFields = (projField, allFields, fieldConfig) => {\n const normalizedProjField = projField.reduce((acc, field) => {\n if (field.constructor.name === 'RegExp') {\n acc.push(...allFields.filter(fieldName => fieldName.search(field) !== -1));\n } else if (field in fieldConfig) {\n acc.push(field);\n }\n return acc;\n }, []);\n return Array.from(new Set(normalizedProjField)).map(field => field.trim());\n};\n\n/**\n * Get the numberFormatted value if numberFormat present,\n * else returns the supplied value.\n * @param {Object} field Field Instance\n * @param {Number|String} value\n * @return {Number|String}\n */\nexport const getNumberFormattedVal = (field, value) => {\n if (field.numberFormat) {\n return field.numberFormat()(value);\n }\n return value;\n};\n","import { FilteringMode } from './enums';\nimport { getUniqueId } from './utils';\nimport {\n updateFields,\n cloneWithSelect,\n cloneWithProject,\n updateData,\n getNormalizedProFields\n} from './helper';\nimport { crossProduct, difference, naturalJoinFilter, union } from './operator';\n\n/**\n * Relation provides the definitions of basic operators of relational algebra like *selection*, *projection*, *union*,\n * *difference* etc.\n *\n * It is extended by {@link DataModel} to inherit the functionalities of relational algebra concept.\n *\n * @class\n * @public\n * @module Relation\n * @namespace DataModel\n */\nclass Relation {\n\n /**\n * Creates a new Relation instance by providing underlying data and schema.\n *\n * @private\n *\n * @param {Object | string | Relation} data - The input tabular data in dsv or json format or\n * an existing Relation instance object.\n * @param {Array} schema - An array of data schema.\n * @param {Object} 
[options] - The optional options.\n */\n constructor (...params) {\n let source;\n\n this._parent = null;\n this._derivation = [];\n this._ancestorDerivation = [];\n this._children = [];\n\n if (params.length === 1 && ((source = params[0]) instanceof Relation)) {\n // parent datamodel was passed as part of source\n this._colIdentifier = source._colIdentifier;\n this._rowDiffset = source._rowDiffset;\n this._dataFormat = source._dataFormat;\n this._parent = source;\n this._partialFieldspace = this._parent._partialFieldspace;\n this._fieldStoreName = getUniqueId();\n this.__calculateFieldspace().calculateFieldsConfig();\n } else {\n updateData(this, ...params);\n this._fieldStoreName = this._partialFieldspace.name;\n this.__calculateFieldspace().calculateFieldsConfig();\n this._propagationNameSpace = {\n mutableActions: {},\n immutableActions: {}\n };\n }\n }\n\n /**\n * Retrieves the {@link Schema | schema} details for every {@link Field | field} as an array.\n *\n * @public\n *\n * @return {Array.} Array of fields schema.\n * ```\n * [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', numberFormat: (val) => `${val} miles / gallon` },\n * { name: 'Cylinder', type: 'dimension' },\n * { name: 'Displacement', type: 'measure', defAggFn: 'max' },\n * { name: 'HorsePower', type: 'measure', defAggFn: 'max' },\n * { name: 'Weight_in_lbs', type: 'measure', defAggFn: 'avg', },\n * { name: 'Acceleration', type: 'measure', defAggFn: 'avg' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin' }\n * ]\n * ```\n */\n getSchema () {\n return this.getFieldspace().fields.map(d => d.schema());\n }\n\n /**\n * Returns the name of the {@link DataModel} instance. If no name was specified during {@link DataModel}\n * initialization, then it returns an auto-generated name.\n *\n * @public\n *\n * @return {string} Name of the DataModel instance.\n */\n getName() {\n return this._fieldStoreName;\n }\n\n getFieldspace () {\n return this._fieldspace;\n }\n\n __calculateFieldspace () {\n this._fieldspace = updateFields([this._rowDiffset, this._colIdentifier],\n this.getPartialFieldspace(), this._fieldStoreName);\n return this;\n }\n\n getPartialFieldspace () {\n return this._partialFieldspace;\n }\n\n /**\n * Performs {@link link_of_cross_product | cross-product} between two {@link DataModel} instances and returns a\n * new {@link DataModel} instance containing the results. This operation is also called theta join.\n *\n * Cross product takes two sets and creates one set where each value of one set is paired with each value of the\n * other set.\n *\n * This method takes an optional predicate which filters the generated result rows. If the predicate returns true\n * the combined row is included in the resultant table.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.join(originDM));\n *\n * console.log(carsDM.join(originDM,\n * obj => obj[originDM.getName()].Origin === obj[carsDM.getName()].Origin));\n *\n * @text\n * This is the chained version of the `join` operator. 
`join` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel to be joined with the current instance DataModel.\n * @param {SelectionPredicate} filterFn - The predicate function that will filter the result of the crossProduct.\n *\n * @return {DataModel} New DataModel instance created after joining.\n */\n join (joinWith, filterFn) {\n return crossProduct(this, joinWith, filterFn);\n }\n\n /**\n * {@link natural_join | Natural join} is a special kind of cross-product join where filtering of rows is performed\n * internally by resolving the fields common to both tables, and only the rows with matching values in those\n * fields are included.\n *\n * @example\n * let originDM = dm.project(['Origin','Origin_Formal_Name']);\n * let carsDM = dm.project(['Name','Miles_per_Gallon','Origin'])\n *\n * console.log(carsDM.naturalJoin(originDM));\n *\n * @text\n * This is the chained version of the `naturalJoin` operator. `naturalJoin` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} joinWith - The DataModel with which the current instance of DataModel on which the method is\n * called will be joined.\n * @return {DataModel} New DataModel instance created after joining.\n */\n naturalJoin (joinWith) {\n return crossProduct(this, joinWith, naturalJoinFilter(this, joinWith), true);\n }\n\n /**\n * {@link link_to_union | Union} operation can be termed as vertical stacking of all rows from both the DataModel\n * instances, provided that both {@link DataModel} instances have the same column names.\n *\n * @example\n * console.log(EuropeanMakerDM.union(USAMakerDM));\n *\n * @text\n * This is the chained version of the `union` operator. `union` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} unionWith - DataModel instance for which union has to be applied with the instance on which\n * the method is called\n *\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n union (unionWith) {\n return union(this, unionWith);\n }\n\n /**\n * {@link link_to_difference | Difference } operation only includes rows which are present in the datamodel on which\n * it was called but not in the one passed as argument.\n *\n * @example\n * console.log(highPowerDM.difference(highExpensiveDM));\n *\n * @text\n * This is the chained version of the `difference` operator. `difference` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {DataModel} differenceWith - DataModel instance for which difference has to be applied with the instance\n * on which the method is called\n * @return {DataModel} New DataModel instance with the result of the operation\n */\n difference (differenceWith) {\n return difference(this, differenceWith);\n }\n\n /**\n * {@link link_to_selection | Selection} is a row filtering operation. It expects a predicate and an optional mode\n * which controls which rows should be included in the resultant DataModel instance.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
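// Editor's sketch of the four relational operators defined above, on hypothetical
// instances (names are illustrative only):
// const joined = carsDM.naturalJoin(originDM); // keys on fields common to both models
// const makers = europeanMakerDM.union(usaMakerDM); // requires identical column names
// const rest = carsDM.difference(expensiveCarsDM); // rows present only in carsDM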
For the selection operation, the selection\n * function is called for each row of the DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * // with selection mode NORMAL:\n * const normDt = dt.select(fields => fields.Origin.value === \"USA\")\n * console.log(normDt);\n *\n * // with selection mode INVERSE:\n * const inverDt = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt);\n *\n * // with selection mode ALL:\n * const dtArr = dt.select(fields => fields.Origin.value === \"USA\", { mode: DataModel.FilteringMode.ALL })\n * // print the selected parts\n * console.log(dtArr[0]);\n * // print the inverted parts\n * console.log(dtArr[1]);\n *\n * @text\n * This is the chained version of the `select` operator. `select` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Function} selectFn - The predicate function which is called for each row with the current row.\n * ```\n * function (row, i, cloneProvider, store) { ... }\n * ```\n * @param {Object} config - The configuration object to control the inclusion/exclusion of a row in the resultant\n * DataModel instance.\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {DataModel} Returns the new DataModel instance(s) after operation.\n */\n select (selectFn, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n config.mode = config.mode || defConfig.mode;\n\n const cloneConfig = { saveChild: config.saveChild };\n return cloneWithSelect(\n this,\n selectFn,\n config,\n cloneConfig\n );\n }\n\n /**\n * Returns a boolean value indicating if the current {@link DataModel} instance has data.\n *\n * @example\n * const schema = [\n * { name: 'CarName', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n * const data = [];\n *\n * const dt = new DataModel(data, schema);\n * console.log(dt.isEmpty());\n *\n * @public\n *\n * @return {Boolean} True if the datamodel has no data, otherwise false.\n */\n isEmpty () {\n return !this._rowDiffset.length || !this._colIdentifier.length;\n }\n\n /**\n * Creates a clone from the current DataModel instance with a child-parent relationship.\n *\n * @private\n * @param {boolean} [saveChild=true] - Whether the cloned instance would be recorded in the parent instance.\n * @return {DataModel} - Returns the newly cloned DataModel instance.\n */\n clone (saveChild = true) {\n const clonedDm = new this.constructor(this);\n if (saveChild) {\n clonedDm.setParent(this);\n } else {\n clonedDm.setParent(null);\n }\n return clonedDm;\n }\n\n /**\n * {@link Projection} is a column (field) filtering operation. It expects a list of field names and either includes\n * or excludes them, based on {@link FilteringMode}, in the resultant variable.\n *\n * Projection expects an array of field names based on which it creates the selection and rejection sets. 
All the fields\n * whose names are present in the array go into the selection set, and the rest of the fields go into the\n * rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection set is only a logical idea for concept explanation purpose.\n *\n * @example\n * const dm = new DataModel(data, schema);\n *\n * // with projection mode NORMAL:\n * const normDt = dt.project([\"Name\", \"HorsePower\"]);\n * console.log(normDt.getData());\n *\n * // with projection mode INVERSE:\n * const inverDt = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.INVERSE })\n * console.log(inverDt.getData());\n *\n * // with projection mode ALL:\n * const dtArr = dt.project([\"Name\", \"HorsePower\"], { mode: DataModel.FilteringMode.ALL })\n * // print the normal parts\n * console.log(dtArr[0].getData());\n * // print the inverted parts\n * console.log(dtArr[1].getData());\n *\n * @text\n * This is the chained version of the `project` operator. `project` can also be used as\n * {@link link_to_join_op | functional operator}.\n *\n * @public\n *\n * @param {Array.} projField - An array of column names as strings or regular expressions.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {DataModel} Returns the new DataModel instance after operation.\n */\n project (projField, config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n config = Object.assign({}, defConfig, config);\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const { mode } = config;\n const normalizedProjField = getNormalizedProFields(projField, allFields, fieldConfig);\n\n let dataModel;\n\n if (mode === FilteringMode.ALL) {\n let projectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.NORMAL,\n saveChild: config.saveChild\n }, allFields);\n let rejectionClone = cloneWithProject(this, normalizedProjField, {\n mode: FilteringMode.INVERSE,\n saveChild: config.saveChild\n }, allFields);\n dataModel = [projectionClone, rejectionClone];\n } else {\n let projectionClone = cloneWithProject(this, normalizedProjField, config, allFields);\n dataModel = projectionClone;\n }\n\n return dataModel;\n }\n\n getFieldsConfig () {\n return this._fieldConfig;\n }\n\n calculateFieldsConfig () {\n this._fieldConfig = this._fieldspace.fields.reduce((acc, fieldObj, i) => {\n acc[fieldObj.name()] = {\n index: i,\n def: fieldObj.schema(),\n };\n return acc;\n }, {});\n return this;\n }\n\n\n /**\n * Frees up the resources associated with the current DataModel instance and breaks all the links the instance has\n * in the DAG.\n *\n * @public\n */\n dispose () {\n this._parent && this._parent.removeChild(this);\n this._parent = null;\n this._children.forEach((child) => {\n child._parent = null;\n });\n this._children = [];\n }\n\n /**\n * Removes the specified child {@link DataModel} from the child list of the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { 
Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\")\n * dt.removeChild(dt2);\n *\n * @private\n *\n * @param {DataModel} child - Delegates the parent to remove this child.\n */\n removeChild (child) {\n let idx = this._children.findIndex(sibling => sibling === child);\n idx !== -1 ? this._children.splice(idx, 1) : true;\n }\n\n /**\n * Sets the specified {@link DataModel} as a parent for the current {@link DataModel} instance.\n *\n * @param {DataModel} parent - The datamodel instance which will act as parent.\n */\n setParent (parent) {\n this._parent && this._parent.removeChild(this);\n this._parent = parent;\n parent && parent._children.push(this);\n }\n\n /**\n * Returns the parent {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const parentDm = dt2.getParent();\n *\n * @return {DataModel} Returns the parent DataModel instance.\n */\n getParent () {\n return this._parent;\n }\n\n /**\n * Returns the immediate child {@link DataModel} instances.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n *\n * const childDm1 = dt.select(fields => fields.Origin.value === \"USA\");\n * const childDm2 = dt.select(fields => fields.Origin.value === \"Japan\");\n * const childDm3 = dt.groupBy([\"Origin\"]);\n *\n * @return {DataModel[]} Returns the immediate child DataModel instances.\n */\n getChildren () {\n return this._children;\n }\n\n /**\n * Returns the in-between operation meta data while creating the current {@link DataModel} instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const derivations = dt3.getDerivations();\n *\n * @return {Any[]} Returns the derivation meta data.\n */\n getDerivations () {\n return 
this._derivation;\n }\n\n /**\n * Returns the in-between operation meta data of the operations performed from the root {@link DataModel} to the\n * current instance.\n *\n * @example\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'HorsePower', type: 'measure' },\n * { name: \"Origin\", type: 'dimension' }\n * ];\n *\n * const data = [\n * { Name: \"chevrolet chevelle malibu\", Horsepower: 130, Origin: \"USA\" },\n * { Name: \"citroen ds-21 pallas\", Horsepower: 115, Origin: \"Europe\" },\n * { Name: \"datsun pl510\", Horsepower: 88, Origin: \"Japan\" },\n * { Name: \"amc rebel sst\", Horsepower: 150, Origin: \"USA\"},\n * ]\n *\n * const dt = new DataModel(data, schema);\n * const dt2 = dt.select(fields => fields.Origin.value === \"USA\");\n * const dt3 = dt2.groupBy([\"Origin\"]);\n * const ancDerivations = dt3.getAncestorDerivations();\n *\n * @return {Any[]} Returns the previous derivation meta data.\n */\n getAncestorDerivations () {\n return this._ancestorDerivation;\n }\n}\n\nexport default Relation;\n","/* eslint-disable default-case */\n\nimport { FieldType, DimensionSubtype, DataFormat, FilteringMode } from './enums';\nimport {\n persistDerivations,\n getRootGroupByModel,\n propagateToAllDataModels,\n getRootDataModel,\n propagateImmutableActions,\n addToPropNamespace,\n sanitizeUnitSchema,\n splitWithSelect,\n splitWithProject,\n getNormalizedProFields\n} from './helper';\nimport { DM_DERIVATIVES, PROPAGATION } from './constants';\nimport {\n dataBuilder,\n rowDiffsetIterator,\n groupBy\n} from './operator';\nimport { createBinnedFieldData } from './operator/bucket-creator';\nimport Relation from './relation';\nimport reducerStore from './utils/reducer-store';\nimport { createFields } from './field-creator';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport Value from './value';\nimport { converterStore } from './converter';\nimport { fieldRegistry } from './fields';\n\n/**\n * DataModel is an in-browser representation of tabular data. It supports\n * {@link https://en.wikipedia.org/wiki/Relational_algebra | relational algebra} operators as well as generic data\n * processing operators.\n * DataModel extends the {@link Relation} class which defines all the relational algebra operators. DataModel gives\n * definition of generic data processing operators which are not relational algebra compliant.\n *\n * @public\n * @class\n * @extends Relation\n * @memberof DataModel\n */\nclass DataModel extends Relation {\n /**\n * Creates a new DataModel instance by providing data and schema. Data could be in the form of\n * - Flat JSON\n * - DSV String\n * - 2D Array\n *\n * By default DataModel finds a suitable adapter to deserialize the data. DataModel also expects a\n * {@link Schema | schema} for identifying the variables present in data.\n *\n * @constructor\n * @example\n * const data = loadData('cars.csv');\n * const schema = [\n * { name: 'Name', type: 'dimension' },\n * { name: 'Miles_per_Gallon', type: 'measure', unit : 'cm', scale: '1000', numberformat: val => `${val}G`},\n * { name: 'Cylinders', type: 'dimension' },\n * { name: 'Displacement', type: 'measure' },\n * { name: 'Horsepower', type: 'measure' },\n * { name: 'Weight_in_lbs', type: 'measure' },\n * { name: 'Acceleration', type: 'measure' },\n * { name: 'Year', type: 'dimension', subtype: 'datetime', format: '%Y' },\n * { name: 'Origin', type: 'dimension' }\n * ];\n * const dm = new DataModel(data, schema, { name: 'Cars' });\n * table(dm);\n *\n * @public\n *\n * @param {Array. 
| string | Array.} data Input data in any of the mentioned formats\n * @param {Array.} schema Definition of the variables. Order of the variables in data and order of the\n * variables in schema have to be the same.\n * @param {object} [options] Optional arguments to specify more settings regarding the creation part\n * @param {string} [options.name] Name of the datamodel instance. If no name is given an auto-generated name is\n * assigned to the instance.\n * @param {string} [options.fieldSeparator=','] Specify the field separator if the data is a DSV string.\n */\n constructor (...args) {\n super(...args);\n\n this._onPropagation = [];\n }\n\n /**\n * Reducers are simple functions which reduce an array of numbers to a representative number of the set.\n * Like an array of numbers `[10, 20, 5, 15]` can be reduced to `12.5` if average / mean reducer function is\n * applied. All the measure fields in datamodel (variables in data) need a reducer to handle aggregation.\n *\n * @public\n *\n * @return {ReducerStore} Singleton instance of {@link ReducerStore}.\n */\n static get Reducers () {\n return reducerStore;\n }\n\n /**\n * Converters are functions that transform data from various formats to a DataModel-consumable format.\n */\n static get Converters() {\n return converterStore;\n }\n\n /**\n * Registers new types of fields\n */\n static get FieldTypes() {\n return fieldRegistry;\n }\n\n /**\n * Configure null, undefined, invalid values in the source data\n *\n * @public\n *\n * @param {Object} [config] - Configuration to control how null, undefined and non-parsable values are\n * represented in DataModel.\n * @param {string} [config.undefined] - Define how an undefined value will be represented.\n * @param {string} [config.null] - Define how a null value will be represented.\n * @param {string} [config.invalid] - Define how a non-parsable value will be represented.\n */\n static configureInvalidAwareTypes (config) {\n return InvalidAwareTypes.invalidAwareVals(config);\n }\n\n /**\n * Retrieve the data attached to an instance in JSON format.\n *\n * @example\n * // DataModel instance is already prepared and assigned to dm variable\n * const data = dm.getData({\n * order: 'column',\n * formatter: {\n * origin: (val) => val === 'European Union' ? 'EU' : val\n * }\n * });\n * console.log(data);\n *\n * @public\n *\n * @param {Object} [options] Options to control how the raw data is to be returned.\n * @param {string} [options.order='row'] Defines if data is retrieved in row order or column order. Possible values\n * are `'row'` and `'column'`\n * @param {Function} [options.formatter=null] Formats the output data. This expects an object, where the keys are\n * the names of the variables that need to be formatted. The formatter function is called for each row, passing\n * the value of the cell for that row as argument. The formatter is a function in the form of\n * `function (value, rowId, schema) => { ... }`\n * Know more about {@link Formatter}.\n *\n * @return {Array} Returns a multidimensional array of the data with schema. The return format looks like\n * ```\n * {\n * data,\n * schema\n * }\n * ```\n */\n getData (options) {\n const defOptions = {\n order: 'row',\n formatter: null,\n withUid: false,\n getAllFields: false,\n sort: []\n };\n options = Object.assign({}, defOptions, options);\n const fields = this.getPartialFieldspace().fields;\n\n const dataGenerated = dataBuilder.call(\n this,\n this.getPartialFieldspace().fields,\n this._rowDiffset,\n options.getAllFields ? 
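// Editor's note: getUids below expands the compressed _rowDiffset back into plain row
// indices, e.g. a diffset of '0-2,5' yields [0, 1, 2, 5].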
fields.map(d => d.name()).join() : this._colIdentifier,\n options.sort,\n {\n columnWise: options.order === 'column',\n addUid: !!options.withUid\n }\n );\n\n if (!options.formatter) {\n return dataGenerated;\n }\n\n const { formatter } = options;\n const { data, schema, uids } = dataGenerated;\n const fieldNames = schema.map((e => e.name));\n const fmtFieldNames = Object.keys(formatter);\n const fmtFieldIdx = fmtFieldNames.reduce((acc, next) => {\n const idx = fieldNames.indexOf(next);\n if (idx !== -1) {\n acc.push([idx, formatter[next]]);\n }\n return acc;\n }, []);\n\n if (options.order === 'column') {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n data[fIdx].forEach((datum, datumIdx) => {\n data[fIdx][datumIdx] = fmtFn.call(\n undefined,\n datum,\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n } else {\n data.forEach((datum, datumIdx) => {\n fmtFieldIdx.forEach((elem) => {\n const fIdx = elem[0];\n const fmtFn = elem[1];\n\n datum[fIdx] = fmtFn.call(\n undefined,\n datum[fIdx],\n uids[datumIdx],\n schema[fIdx]\n );\n });\n });\n }\n\n return dataGenerated;\n }\n\n /**\n * Returns the unique ids in an array.\n *\n * @return {Array} Returns an array of ids.\n */\n getUids () {\n const rowDiffset = this._rowDiffset;\n const ids = [];\n\n if (rowDiffset.length) {\n const diffSets = rowDiffset.split(',');\n\n diffSets.forEach((set) => {\n let [start, end] = set.split('-').map(Number);\n\n end = end !== undefined ? end : start;\n ids.push(...Array(end - start + 1).fill().map((_, idx) => start + idx));\n });\n }\n\n return ids;\n }\n /**\n * Groups the data using particular dimensions and by reducing measures. It expects a list of dimensions using which\n * it projects the datamodel and perform aggregations to reduce the duplicate tuples. Refer this\n * {@link link_to_one_example_with_group_by | document} to know the intuition behind groupBy.\n *\n * DataModel by default provides definition of few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the chained implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupedDM = dm.groupBy(['Year'], { horsepower: 'max' } );\n * console.log(groupedDm);\n *\n * @public\n *\n * @param {Array.} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. 
If it's\n * not passed, or any variable is omitted from the object, the default aggregation function from the\n * schema of the variable is used.\n *\n * @return {DataModel} Returns a new DataModel instance after performing the groupby.\n */\n groupBy (fieldsArr, reducers = {}, config = { saveChild: true }) {\n const groupByString = `${fieldsArr.join()}`;\n let params = [this, fieldsArr, reducers];\n const newDataModel = groupBy(...params);\n\n persistDerivations(\n this,\n newDataModel,\n DM_DERIVATIVES.GROUPBY,\n { fieldsArr, groupByString, defaultReducer: reducerStore.defaultReducer() },\n reducers\n );\n\n if (config.saveChild) {\n newDataModel.setParent(this);\n } else {\n newDataModel.setParent(null);\n }\n\n return newDataModel;\n }\n\n /**\n * Performs a sorting operation on the current {@link DataModel} instance according to the specified sorting details.\n * Like every other operator it doesn't mutate the current DataModel instance on which it was called, instead\n * returns a new DataModel instance containing the sorted data.\n *\n * DataModel supports multi-level sorting by listing the variables using which sorting needs to be performed and\n * the type of sorting `ASC` or `DESC`.\n *\n * In the following example, data is sorted by the `Origin` field in `DESC` order at the first level, followed by\n * another level of sorting by `Acceleration` in `ASC` order.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * let sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"],\n * [\"Acceleration\"] // Default value is ASC\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * // Sort with a custom sorting function\n * sortedDm = dm.sort([\n * [\"Origin\", \"DESC\"],\n * [\"Acceleration\", (a, b) => a - b] // Custom sorting function\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @text\n * DataModel also provides another sorting mechanism out of the box where sort is applied to a variable using\n * another variable which determines the order.\n * Like the above, the DataModel contains three fields `Origin`, `Name` and `Acceleration`. Now, the data in this\n * model can be sorted by the `Origin` field according to the average of all `Acceleration` values for a\n * particular `Origin` value.\n *\n * @example\n * // here dm is the pre-declared DataModel instance containing the data of 'cars.json' file\n * const sortedDm = dm.sort([\n * ['Origin', ['Acceleration', (a, b) => avg(...a.Acceleration) - avg(...b.Acceleration)]]\n * ]);\n *\n * console.log(dm.getData());\n * console.log(sortedDm.getData());\n *\n * @public\n *\n * @param {Array.} sortingDetails - Sorting details based on which the sorting will be performed.\n * @return {DataModel} Returns a new instance of DataModel with sorted data.\n */\n sort (sortingDetails, config = { saveChild: false }) {\n const rawData = this.getData({\n order: 'row',\n sort: sortingDetails\n });\n const header = rawData.schema.map(field => field.name);\n const dataInCSVArr = [header].concat(rawData.data);\n\n const sortedDm = new this.constructor(dataInCSVArr, rawData.schema, { dataFormat: 'DSVArr' });\n\n persistDerivations(\n this,\n sortedDm,\n DM_DERIVATIVES.SORT,\n config,\n sortingDetails\n );\n\n if (config.saveChild) {\n sortedDm.setParent(this);\n } else {\n sortedDm.setParent(null);\n }\n\n return sortedDm;\n }\n\n /**\n * Performs the serialization operation on the current {@link DataModel} instance according to the specified data\n * type. 
When an {@link DataModel} instance is created, it de-serializes the input data into its internal format,\n * and during its serialization process, it converts its internal data format to the specified data type and returns\n * that data regardless what type of data is used during the {@link DataModel} initialization.\n *\n * @example\n * // here dm is the pre-declared DataModel instance.\n * const csvData = dm.serialize(DataModel.DataFormat.DSV_STR, { fieldSeparator: \",\" });\n * console.log(csvData); // The csv formatted data.\n *\n * const jsonData = dm.serialize(DataModel.DataFormat.FLAT_JSON);\n * console.log(jsonData); // The json data.\n *\n * @public\n *\n * @param {string} type - The data type name for serialization.\n * @param {Object} options - The optional option object.\n * @param {string} options.fieldSeparator - The field separator character for DSV data type.\n * @return {Array|string} Returns the serialized data.\n */\n serialize (type, options) {\n type = type || this._dataFormat;\n options = Object.assign({}, { fieldSeparator: ',' }, options);\n\n const fields = this.getFieldspace().fields;\n const colData = fields.map(f => f.formattedData());\n const rowsCount = colData[0].length;\n let serializedData;\n let rowIdx;\n let colIdx;\n\n if (type === DataFormat.FLAT_JSON) {\n serializedData = [];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = {};\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row[fields[colIdx].name()] = colData[colIdx][rowIdx];\n }\n serializedData.push(row);\n }\n } else if (type === DataFormat.DSV_STR) {\n serializedData = [fields.map(f => f.name()).join(options.fieldSeparator)];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row.join(options.fieldSeparator));\n }\n serializedData = serializedData.join('\\n');\n } else if (type === DataFormat.DSV_ARR) {\n serializedData = [fields.map(f => f.name())];\n for (rowIdx = 0; rowIdx < rowsCount; rowIdx++) {\n const row = [];\n for (colIdx = 0; colIdx < fields.length; colIdx++) {\n row.push(colData[colIdx][rowIdx]);\n }\n serializedData.push(row);\n }\n } else {\n throw new Error(`Data type ${type} is not supported`);\n }\n\n return serializedData;\n }\n\n addField (field) {\n const fieldName = field.name();\n this._colIdentifier += `,${fieldName}`;\n const partialFieldspace = this._partialFieldspace;\n const cachedValueObjects = partialFieldspace._cachedValueObjects;\n const formattedData = field.formattedData();\n const rawData = field.partialField.data;\n\n if (!partialFieldspace.fieldsObj()[field.name()]) {\n partialFieldspace.fields.push(field);\n cachedValueObjects.forEach((obj, i) => {\n obj[field.name()] = new Value(formattedData[i], rawData[i], field);\n });\n } else {\n const fieldIndex = partialFieldspace.fields.findIndex(fieldinst => fieldinst.name() === fieldName);\n fieldIndex >= 0 && (partialFieldspace.fields[fieldIndex] = field);\n }\n\n // flush out cached namespace values on addition of new fields\n partialFieldspace._cachedFieldsObj = null;\n partialFieldspace._cachedDimension = null;\n partialFieldspace._cachedMeasure = null;\n\n this.__calculateFieldspace().calculateFieldsConfig();\n return this;\n }\n\n /**\n * Creates a new variable calculated from existing variables. 
This method expects the definition of the newly created\n * variable and a function which resolves the value of the new variable from existing variables.\n *\n * Can create a new measure based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const newDm = dataModel.calculateVariable({\n * name: 'powerToWeight',\n * type: 'measure'\n * }, ['horsepower', 'weight_in_lbs', (hp, weight) => hp / weight ]);\n *\n *\n * Can create a new dimension based on existing variables:\n * @example\n * // DataModel already prepared and assigned to dm variable;\n * const child = dataModel.calculateVariable(\n * {\n * name: 'Efficiency',\n * type: 'dimension'\n * }, ['horsepower', (hp) => {\n * if (hp < 80) { return 'low'; }\n * else if (hp < 120) { return 'moderate'; }\n * else { return 'high'; }\n * }]);\n *\n * @public\n *\n * @param {Object} schema - The schema of the newly defined variable.\n * @param {Array.<string|Function>} dependency - An array containing the dependency variable names and a resolver\n * function as the last element.\n * @param {Object} config - An optional config object.\n * @param {boolean} [config.saveChild] - Whether the newly created DataModel will be a child.\n * @param {boolean} [config.replaceVar] - Whether the newly created variable will replace the existing variable.\n * @return {DataModel} Returns an instance of DataModel with the new field.\n */\n calculateVariable (schema, dependency, config) {\n schema = sanitizeUnitSchema(schema);\n config = Object.assign({}, { saveChild: true, replaceVar: false }, config);\n\n const fieldsConfig = this.getFieldsConfig();\n const depVars = dependency.slice(0, dependency.length - 1);\n const retrieveFn = dependency[dependency.length - 1];\n\n if (fieldsConfig[schema.name] && !config.replaceVar) {\n throw new Error(`${schema.name} field already exists in datamodel`);\n }\n\n const depFieldIndices = depVars.map((field) => {\n const fieldSpec = fieldsConfig[field];\n if (!fieldSpec) {\n // @todo don't throw error here, use warning in production mode\n throw new Error(`${field} is not a valid column name.`);\n }\n return fieldSpec.index;\n });\n\n const clone = this.clone(config.saveChild);\n\n const fs = clone.getFieldspace().fields;\n const suppliedFields = depFieldIndices.map(idx => fs[idx]);\n\n let cachedStore = {};\n let cloneProvider = () => this.detachedRoot();\n\n const computedValues = [];\n rowDiffsetIterator(clone._rowDiffset, (i) => {\n const fieldsData = suppliedFields.map(field => field.partialField.data[i]);\n computedValues[i] = retrieveFn(...fieldsData, i, cloneProvider, cachedStore);\n });\n const [field] = createFields([computedValues], [schema], [schema.name]);\n clone.addField(field);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.CAL_VAR,\n { config: schema, fields: depVars },\n retrieveFn\n );\n\n return clone;\n }\n
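// Worth noting from the implementation above: after the values of the listed fields, the
// resolver also receives the row index, a clone provider and a shared cache object. A small
// sketch (the field name and tag format are illustrative):
const taggedDm = dm.calculateVariable(
    { name: 'rowTag', type: 'dimension' },
    ['Name', (name, rowIdx) => `${rowIdx}:${name}`] // extra arguments follow the listed fields
);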
\n /**\n * Propagates changes across all the connected DataModel instances.\n *\n * @param {Array} identifiers - A list of identifiers that were interacted with.\n * @param {Object} config - The propagation config; carries the interaction specific `payload`, the `sourceId`\n * and whether the action is mutable.\n * @param {boolean} addToNameSpace - Whether this propagation should be registered in the propagation namespace.\n * @param {Object} propConfig - Additional config used while propagating immutable actions.\n *\n * @return {DataModel} DataModel instance.\n */\n propagate (identifiers, config = {}, addToNameSpace, propConfig = {}) {\n const isMutableAction = config.isMutableAction;\n const propagationSourceId = config.sourceId;\n const payload = config.payload;\n const rootModel = getRootDataModel(this);\n const propagationNameSpace = rootModel._propagationNameSpace;\n const rootGroupByModel = getRootGroupByModel(this);\n const rootModels = {\n groupByModel: rootGroupByModel,\n model: rootModel\n };\n\n addToNameSpace && addToPropNamespace(propagationNameSpace, config, this);\n propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace,\n sourceId: propagationSourceId,\n propagationSource: this },\n Object.assign({\n payload\n }, config));\n\n if (isMutableAction) {\n propagateImmutableActions(propagationNameSpace, rootModel, {\n config,\n propConfig\n }, this);\n }\n\n return this;\n }\n\n /**\n * Associates a callback with an event name.\n *\n * @param {string} eventName - The name of the event.\n * @param {Function} callback - The callback to invoke.\n * @return {DataModel} Returns this current DataModel instance itself.\n */\n on (eventName, callback) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation.push(callback);\n break;\n }\n return this;\n }\n\n /**\n * Unsubscribes the callbacks for the provided event name.\n *\n * @param {string} eventName - The name of the event to unsubscribe.\n * @return {DataModel} Returns the current DataModel instance itself.\n */\n unsubscribe (eventName) {\n switch (eventName) {\n case PROPAGATION:\n this._onPropagation = [];\n break;\n\n }\n return this;\n }\n\n /**\n * Invokes the callbacks associated with propagation.\n *\n * @param {DataModel} propModel - The propagated DataModel.\n * @param {Object} payload - The interaction payload.\n * @memberof DataModel\n */\n handlePropagation (propModel, payload) {\n let propListeners = this._onPropagation;\n propListeners.forEach(fn => fn.call(this, propModel, payload));\n }\n\n /**\n * Performs the binning operation on a measure field based on the binning configuration. Binning means discretizing\n * values of a measure. Binning configuration contains an array; subsequent values from the array mark the boundaries\n * of buckets in [inclusive, exclusive) range format. This operation does not mutate the subject measure field,\n * instead, it creates a new field (variable) of type dimension and subtype binned.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets,\n * - providing bins count,\n * - providing each bin size.\n *\n * When custom `buckets` are provided as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', buckets: [30, 80, 100, 110] }\n * const binnedDM = dm.bin('horsepower', config);\n *\n * @text\n * When `binsCount` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHP', binsCount: 5, start: 0, end: 100 }\n * const binDM = dm.bin('horsepower', config);\n *\n * @text\n * When `binSize` is defined as part of binning configuration:\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { name: 'binnedHorsepower', binSize: 20, start: 5 }\n * const binDM = dm.bin('horsepower', config);\n *\n * @public\n *\n * @param {string} measureFieldName - The name of the target measure field.\n * @param {Object} config - The config object.\n * @param {string} [config.name] - The name of the new field which will be created.\n * @param {Array} [config.buckets] - An array containing the bucket ranges.\n * @param {number} [config.binSize] - The size of each bin. It is ignored when buckets are given.\n * @param {number} [config.binsCount] - The total number of bins to generate. 
It is ignored when buckets are given.\n * @param {string} [config.start] - The start value of the bucket ranges. It is ignored when buckets are given.\n * @param {string} [config.end] - The end value of the bucket ranges. It is ignored when buckets are given.\n * @return {DataModel} Returns a new {@link DataModel} instance with the new field.\n */\n bin (measureFieldName, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n if (!fieldsConfig[measureFieldName]) {\n throw new Error(`Field ${measureFieldName} doesn't exist`);\n }\n\n const binFieldName = config.name || `${measureFieldName}_binned`;\n\n if (fieldsConfig[binFieldName]) {\n throw new Error(`Field ${binFieldName} already exists`);\n }\n\n const measureField = this.getFieldspace().fieldsObj()[measureFieldName];\n const { binnedData, bins } = createBinnedFieldData(measureField, this._rowDiffset, config);\n\n const binField = createFields([binnedData], [\n {\n name: binFieldName,\n type: FieldType.DIMENSION,\n subtype: DimensionSubtype.BINNED,\n bins\n }], [binFieldName])[0];\n\n const clone = this.clone(config.saveChild);\n clone.addField(binField);\n\n persistDerivations(\n this,\n clone,\n DM_DERIVATIVES.BIN,\n { measureFieldName, config, binFieldName },\n null\n );\n\n return clone;\n }\n\n /**\n * Creates a new {@link DataModel} instance with completely detached root from current {@link DataModel} instance,\n * the new {@link DataModel} instance has no parent-children relationship with the current one, but has same data as\n * the current one.\n * This API is useful when a completely different {@link DataModel} but with same data as the current instance is\n * needed.\n *\n * @example\n * const dm = new DataModel(data, schema);\n * const detachedDm = dm.detachedRoot();\n *\n * // has different namespace\n * console.log(dm.getPartialFieldspace().name);\n * console.log(detachedDm.getPartialFieldspace().name);\n *\n * // has same data\n * console.log(dm.getData());\n * console.log(detachedDm.getData());\n *\n * @public\n *\n * @return {DataModel} Returns a detached {@link DataModel} instance.\n */\n detachedRoot () {\n const data = this.serialize(DataFormat.FLAT_JSON);\n const schema = this.getSchema();\n\n return new DataModel(data, schema);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of rows in the source {@link DataModel}\n * instance based on a set of dimensions.\n *\n * For each unique dimensional value, a new split is created which creates a unique {@link DataModel} instance for\n * that split\n *\n * If multiple dimensions are provided, it splits the source {@link DataModel} instance with all possible\n * combinations of the dimensional values for all the dimensions provided\n *\n * Additionally, it also accepts a predicate function to reduce the set of rows provided. 
A\n * {@link link_to_selection | Selection} is performed on all the split {@link DataModel} instances based on\n * the predicate function.\n *\n * @example\n * // without predicate function:\n * const splitDt = dt.splitByRow(['Origin'])\n * console.log(splitDt);\n * // This should give three unique DataModel instances, one each having rows only for 'USA',\n * // 'Europe' and 'Japan' respectively\n *\n * @example\n * // without predicate function:\n * const splitDtMulti = dt.splitByRow(['Origin', 'Cylinders'])\n * console.log(splitDtMulti);\n * // This should give DataModel instances for all unique combinations of Origin and Cylinder values\n *\n * @example\n * // with predicate function:\n * const splitWithPredDt = dt.splitByRow(['Origin'], fields => fields.Origin.value !== \"USA\")\n * console.log(splitWithPredDt);\n * // This should not include the rows with Origin : 'USA' in any split\n *\n *\n * @public\n *\n * @param {Array} dimensionArr - Set of dimensions based on which the split should occur\n * @param {Function} [reducerFn] - An optional predicate applied to each split\n * @param {Object} config - The configuration object\n * @param {boolean} [config.saveChild] - Configuration to save child or not\n * @param {string} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByRow (dimensionArr, reducerFn, config) {\n const fieldsConfig = this.getFieldsConfig();\n\n dimensionArr.forEach((fieldName) => {\n if (!fieldsConfig[fieldName]) {\n throw new Error(`Field ${fieldName} doesn't exist in the schema`);\n }\n });\n\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n\n config = Object.assign({}, defConfig, config);\n\n return splitWithSelect(this, dimensionArr, reducerFn, config);\n }\n\n /**\n * Creates a set of new {@link DataModel} instances by splitting the set of fields in the source {@link DataModel}\n * instance based on a set of common and unique field names provided.\n *\n * Each DataModel created contains a set of fields which are common to all and a set of unique fields.\n * It also accepts configurations such as saveChild and mode (inverse or normal) to include/exclude the respective\n * fields\n *\n * @example\n * const splitDt = dt.splitByColumn([['Acceleration'], ['Horsepower']], ['Origin'])\n * console.log(splitDt);\n * // This should give two unique DataModel instances, both having the field 'Origin' and\n * // one each having 'Acceleration' and 'Horsepower' fields respectively\n *\n * @example\n * // with inverse mode:\n * const splitDtInv = dt.splitByColumn([['Acceleration'], ['Horsepower']], ['Origin', 'Cylinders'],\n * { mode: 'inverse' })\n * console.log(splitDtInv);\n * // This should give DataModel instances in the following way:\n * // All DataModel Instances do not have the fields 'Origin' and 'Cylinders'\n * // One DataModel Instance has rest of the fields except 'Acceleration' and the other DataModel instance\n * // has rest of the fields except 'Horsepower'\n *\n *\n *\n * @public\n *\n * @param {Array} uniqueFields - Set of unique fields included in each DataModel instance\n * @param {Array} commonFields - Set of common fields included in all DataModel instances\n * @param {Object} config - The configuration object\n * @param {boolean} [config.saveChild] - Configuration to save child or not\n * @param {string} [config.mode=FilteringMode.NORMAL] - The mode of the selection.\n * @return {Array} Returns the new DataModel instances after operation.\n */\n splitByColumn (uniqueFields = [], 
commonFields = [], config) {\n const defConfig = {\n mode: FilteringMode.NORMAL,\n saveChild: true\n };\n const fieldConfig = this.getFieldsConfig();\n const allFields = Object.keys(fieldConfig);\n const normalizedProjFieldSets = [[commonFields]];\n\n config = Object.assign({}, defConfig, config);\n uniqueFields = uniqueFields.length ? uniqueFields : [[]];\n\n\n uniqueFields.forEach((fieldSet, i) => {\n normalizedProjFieldSets[i] = getNormalizedProFields(\n [...fieldSet, ...commonFields],\n allFields,\n fieldConfig);\n });\n\n return splitWithProject(this, normalizedProjFieldSets, config, allFields);\n }\n\n\n}\n\nexport default DataModel;\n","import { fnList } from '../operator/group-by-function';\n\nexport const { sum, avg, min, max, first, last, count, std: sd } = fnList;\n","import DataModel from './datamodel';\nimport {\n compose,\n bin,\n select,\n project,\n groupby as groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n} from './operator';\nimport * as Stats from './stats';\nimport * as enums from './enums';\nimport { DataConverter } from './converter';\nimport { DateTimeFormatter } from './utils';\nimport { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants';\nimport InvalidAwareTypes from './invalid-aware-types';\nimport pkg from '../package.json';\nimport * as FieldsUtility from './fields';\n\nconst Operators = {\n compose,\n bin,\n select,\n project,\n groupBy,\n calculateVariable,\n sort,\n crossProduct,\n difference,\n naturalJoin,\n leftOuterJoin,\n rightOuterJoin,\n fullOuterJoin,\n union,\n rowDiffsetIterator\n};\n\nconst version = pkg.version;\nObject.assign(DataModel, {\n Operators,\n Stats,\n DM_DERIVATIVES,\n DateTimeFormatter,\n DataFormat,\n FilteringMode,\n InvalidAwareTypes,\n version,\n DataConverter,\n FieldsUtility\n}, enums);\n\nexport default DataModel;\n","import { persistDerivations } from '../helper';\nimport { DM_DERIVATIVES } from '../constants';\n\n/**\n * DataModel's opearators are exposed as composable functional operators as well as chainable operators. Chainable\n * operators are called on the instances of {@link Datamodel} and {@link Relation} class.\n *\n * Those same operators can be used as composable operators from `DataModel.Operators` namespace.\n *\n * All these operators have similar behaviour. All these operators when called with the argument returns a function\n * which expects a DataModel instance.\n *\n * @public\n * @module Operators\n * @namespace DataModel\n */\n\n/**\n * This is functional version of selection operator. {@link link_to_selection | Selection} is a row filtering operation.\n * It takes {@link SelectionPredicate | predicate} for filtering criteria and returns a function.\n * The returned function is called with the DataModel instance on which the action needs to be performed.\n *\n * {@link SelectionPredicate} is a function which returns a boolean value. 
For selection operation the selection\n * function is called for each row of DataModel instance with the current row passed as argument.\n *\n * After executing {@link SelectionPredicate} the rows are labeled as either an entry of selection set or an entry\n * of rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * [Warn] Selection and rejection sets are only a logical idea for concept explanation purposes.\n *\n * @error\n * [Error] `FilteringMode.ALL` is not a valid working mode for the functional version of `select`. It's only available on the\n * chained version.\n *\n * @example\n * const select = DataModel.Operators.select;\n * const usaCarsFn = select(fields => fields.Origin.value === 'USA');\n * const usaCarsDm = usaCarsFn(dm);\n * console.log(usaCarsDm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {SelectionPredicate} selectFn - Predicate function which is called for each row with the current row\n * ```\n * function (row, i) { ... }\n * ```\n * @param {Object} [config] - The configuration object to control the inclusion/exclusion of a row in resultant\n * DataModel instance\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - The mode of the selection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const select = (...args) => dm => dm.select(...args);\n\n/**\n * This is functional version of projection operator. {@link link_to_projection | Projection} is a column filtering\n * operation. It expects a list of field names and either includes those or excludes those based on {@link FilteringMode} on\n * the resultant variable. It returns a function which is called with the DataModel instance on which the action needs\n * to be performed.\n *\n * Projection expects an array of field names based on which it creates the selection and rejection set. All the fields\n * whose names are present in the array go in the selection set and the rest of the fields go in the rejection set.\n *\n * {@link FilteringMode} operates on the selection and rejection set to determine which one would reflect in the\n * resultant datamodel.\n *\n * @warning\n * Selection and rejection sets are only a logical idea for concept explanation purposes.\n *\n * @error\n * `FilteringMode.ALL` is not a valid working mode for the functional version of `project`. It's only available on the\n * chained version.\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.<string|RegExp>} projField - An array of column names in string or regular expression.\n * @param {Object} [config] - An optional config to control the creation of new DataModel\n * @param {FilteringMode} [config.mode=FilteringMode.NORMAL] - Mode of the projection\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const project = (...args) => dm => dm.project(...args);\n
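// Because select() and project() above return preparator functions, a filtering criterion
// can be defined once and reused across DataModel instances (a sketch; dm1 and dm2 are
// assumed to be DataModel instances created elsewhere):
const onlyUsa = DataModel.Operators.select(fields => fields.Origin.value === 'USA');
const essentials = DataModel.Operators.project(['Name', 'Origin']);

const usaDm = onlyUsa(dm1);
const trimmedUsaDm = essentials(onlyUsa(dm2)); // preparators nest like ordinary functions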
\n/**\n * This is functional version of binning operator. Binning happens on a measure field based on a binning configuration.\n * Binning in DataModel does not aggregate the number of rows present in DataModel instance after binning, it just adds\n * a new field with the binned value. Refer binning {@link example_of_binning | example} to have an intuition of what\n * binning is and the use case.\n *\n * Binning can be configured by\n * - providing custom bin configuration with non-uniform buckets\n * - providing bin count\n * - providing each bin size\n *\n * When custom buckets are provided as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const buckets = {\n * start: 30,\n * stops: [80, 100, 110]\n * };\n * const config = { buckets, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binCount` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { binCount: 5, name: 'binnedHP' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @text\n * When `binSize` is defined as part of binning configuration\n * @example\n * // DataModel already prepared and assigned to dm variable\n * const config = { binSize: 200, name: 'binnedHorsepower' }\n * const binFn = bin('horsepower', config);\n * const binnedDm = binFn(dm);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {String} name Name of the measure which will be used to create the bin\n * @param {Object} config Config required for bin creation\n * @param {Array.<Number>} config.bucketObj.stops Definition of bucket ranges. Two subsequent numbers from the array\n * are picked and a range is created. The first number of the range is inclusive and the second number\n * is exclusive.\n * @param {Number} [config.bucketObj.startAt] Forces the start of the bin from a particular number.\n * If not mentioned, the bin starts from the lower domain of the data when stops is not mentioned, else from\n * the first value of the stops.\n * @param {Number} config.binSize Bucket size for each bin\n * @param {Number} config.binCount Number of bins which will be created\n * @param {String} config.name Name of the new binned field to be created\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const bin = (...args) => dm => dm.bin(...args);\n\n/**\n * This is functional version of `groupBy` operator. Groups the data using particular dimensions and by reducing\n * measures. It expects a list of dimensions using which it projects the datamodel and performs aggregations to reduce\n * the duplicate tuples. Refer this {@link link_to_one_example_with_group_by | document} to know the intuition behind\n * groupBy.\n *\n * DataModel by default provides definitions of a few {@link reducer | Reducers}.\n * {@link ReducerStore | User defined reducers} can also be registered.\n *\n * This is the functional implementation of `groupBy`.\n * `groupBy` also supports {@link link_to_compose_groupBy | composability}\n *\n * @example\n * const groupBy = DataModel.Operators.groupBy;\n * const groupedFn = groupBy(['Year'], { horsepower: 'max' });\n * const groupedDm = groupedFn(dm);\n *\n * @public\n *\n * @param {Array.<string>} fieldsArr - Array containing the name of dimensions\n * @param {Object} [reducers={}] - A map whose key is the variable name and value is the name of the reducer. If it's\n * not passed, or any variable is omitted from the object, the default aggregation function from the\n * schema of the variable is used.\n *\n * @return {PreparatorFunction} Function which expects an instance of DataModel on which the operator needs to be\n * applied.\n */\nexport const groupBy = (...args) => dm => dm.groupBy(...args);\n
\n/**\n * Enables composing operators to run multiple operations and save a group of operations as a named operation on a\n * DataModel. The resulting DataModel will be the result of all the operations provided. The operations will be executed in\n * a serial manner i.e. the result of one operation will be the input for the next operation (like the pipe operator in unix).\n *\n * Supported operations in compose are\n * - `select`\n * - `project`\n * - `groupBy`\n * - `bin`\n * - `compose`\n *\n * @example\n * const compose = DataModel.Operators.compose;\n * const select = DataModel.Operators.select;\n * const project = DataModel.Operators.project;\n *\n * let composedFn = compose(\n * select(fields => fields.netprofit.value <= 15),\n * project(['netprofit', 'netsales']));\n *\n * const dataModel = new DataModel(data1, schema1);\n *\n * let composedDm = composedFn(dataModel);\n *\n * @public\n * @namespace DataModel\n * @module Operators\n *\n * @param {Array.<Function>} operators - An array of operations that will be applied on the\n * datamodel.\n *\n * @returns {DataModel} Instance of resultant DataModel\n */\nexport const compose = (...operations) =>\n (dm, config = { saveChild: true }) => {\n let currentDM = dm;\n let firstChild;\n const derivations = [];\n\n operations.forEach((operation) => {\n currentDM = operation(currentDM);\n derivations.push(...currentDM._derivation);\n if (!firstChild) {\n firstChild = currentDM;\n }\n });\n\n if (firstChild && firstChild !== currentDM) {\n firstChild.dispose();\n }\n\n // reset all ancestorDerivation saved in-between compose\n currentDM._ancestorDerivation = [];\n persistDerivations(\n dm,\n currentDM,\n DM_DERIVATIVES.COMPOSE,\n null,\n derivations\n );\n\n if (config.saveChild) {\n currentDM.setParent(dm);\n } else {\n currentDM.setParent(null);\n }\n\n return currentDM;\n };\n","/**\n * Wrapper on the calculateVariable() method of DataModel to provide\n * a pure-function version of it.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of the called function.\n */\nexport const calculateVariable = (...args) => dm => dm.calculateVariable(...args);\n\n/**\n * Wrapper on the sort() method of DataModel to provide\n * a pure-function version of it.\n *\n * @param {Array} args - The argument list.\n * @return {any} Returns the returned value of the called function.\n */\nexport const sort = (...args) => dm => dm.sort(...args);\n","import { crossProduct } from './cross-product';\nimport { naturalJoinFilter } from './natural-join-filter-function';\n\nexport function naturalJoin (dataModel1, dataModel2) {\n return crossProduct(dataModel1, dataModel2, naturalJoinFilter(dataModel1, dataModel2), true);\n}\n"],"sourceRoot":""} \ No newline at end of file
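// The last source above defines naturalJoin as a filtered cross product. A minimal usage
// sketch (dm1 and dm2 are assumed to be DataModel instances sharing at least one
// identically named field):
const { naturalJoin } = DataModel.Operators;
const joinedDm = naturalJoin(dm1, dm2); // keeps row pairs whose shared fields match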
diff --git a/example/index.html b/example/index.html index d2d791d..2b4d5ae 100644 --- a/example/index.html +++ b/example/index.html @@ -13,7 +13,7 @@ - + \ No newline at end of file diff --git a/example/samples/example2.js b/example/samples/example2.js index 0fd3bd2..fd84ae6 100644 --- a/example/samples/example2.js +++ b/example/samples/example2.js @@ -1,4 +1,59 @@ -const DataModel = window.DataModel.default; +// const DataModel = window.DataModel.default; +const columnMajor = (store) => { + let i = 0; + return (...fields) => { + fields.forEach((val, fieldIndex) => { + if (!(store[fieldIndex] instanceof Array)) { + store[fieldIndex] = Array.from({ length: i }); + } + store[fieldIndex].push(val); + }); + i++; + }; +}; + + +function FlatJSON222 (arr, schema) { + if (!Array.isArray(schema)) { + throw new Error('Schema missing or is in an unsupported format'); + } + + const header = {}; + let i = 0; + let insertionIndex; + const columns = []; + const push = columnMajor(columns); + const schemaFieldsName = schema.map(unitSchema => unitSchema.name); + + arr.forEach((item) => { + const fields = []; + schemaFieldsName.forEach((unitSchema) => { + if (unitSchema in header) { + insertionIndex = header[unitSchema]; + } else { + header[unitSchema] = i++; + insertionIndex = i - 1; + } + fields[insertionIndex] = item[unitSchema]; + }); + push(...fields); + }); + + return [Object.keys(header), columns]; +} + +class JSONConverter2 extends DataModel.DataConverter { + constructor () { + super('json2'); + } + + convert (data, schema, options) { + console.log('this is json2'); + return FlatJSON222(data, schema, options); + } +} + +DataModel.Converters.register(new JSONConverter2()); const schema = [ { @@ -50,13 +105,6 @@ const data = [ } ]; -const dm = new DataModel(data, schema); -const dataGenerated = dm.getData({ - order: 'column', - formatter: { - birthday: val => new Date(val), - name: val => `Name: ${val}` - } -}); +const dm = new DataModel(data, schema, { dataFormat: 'json2' }); -console.log(dataGenerated); +console.log(dm.getData()); diff --git a/package.json b/package.json index 39a1036..f16e55f 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "datamodel", "description": "Relational algebra compliant in-memory tabular data store", "homepage": "https://github.com/chartshq/datamodel", - "version": "2.2.3", + "version": "2.3.0", "license": "MIT", "main": "dist/datamodel.js", "keywords": [ diff --git a/src/converter/dataConverterStore.js b/src/converter/dataConverterStore.js new file mode 100644 index 0000000..96de471 --- /dev/null +++ b/src/converter/dataConverterStore.js @@ -0,0 +1,72 @@ +import DataConverter from './model/dataConverter'; +import { DSVStringConverter, DSVArrayConverter, JSONConverter, AutoDataConverter } from './defaultConverters'; + +class DataConverterStore { + constructor() { + this.store = new Map(); + this.converters(this._getDefaultConverters()); + } + + _getDefaultConverters() { + return [ + new DSVStringConverter(), + new DSVArrayConverter(), + new JSONConverter(), + new AutoDataConverter() + ]; + } + + /** + * Sets the given converters in the store and returns the store. + * @param {Array} converters - An array of converter instances. + * @return { Map } + */ + converters(converters = []) { + converters.forEach(converter => this.store.set(converter.type, converter)); + return this.store; + } + + /** + * Registers a converter of type DataConverter. + * @param {DataConverter} converter - The converter instance. + * @returns self + */ + register(converter) { + if (converter instanceof DataConverter) { + this.store.set(converter.type, converter); + return this; + } + return null; + } + + /** + * Removes a converter from the store. + * @param {DataConverter} converter - The converter instance. + * @returns self + */ + unregister(converter) { + this.store.delete(converter.type); + return this; + } + + get(name) { + if (this.store.has(name)) { + return this.store.get(name); + } + return null; + } + +} + +const 
converterStore = (function () { + let store = null; + + function getStore () { + store = new DataConverterStore(); + return store; + } + return store || getStore(); +}()); + +export default converterStore; diff --git a/src/converter/dataConverterStore.spec.js b/src/converter/dataConverterStore.spec.js new file mode 100644 index 0000000..89966fa --- /dev/null +++ b/src/converter/dataConverterStore.spec.js @@ -0,0 +1,31 @@ +/* global describe, it */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import converterStore from './dataConverterStore'; +import DataConverter from '../converter/model/dataConverter'; + +describe('#DataConverterStore', () => { + it('should register and unregister converter', () => { + class JSONConverter2 extends DataConverter { + constructor() { + super('json2'); + } + + convert() { + return ''; + } + } + + const converter = new JSONConverter2(); + converterStore.register(converter); + expect(converterStore.get('json2')).to.not.be.null; + + converterStore.unregister(converter); + expect(converterStore.get('json2')).to.be.null; + }); + + it('should not register invalid Converter', () => { + expect(converterStore.register(() => {})).to.be.null; + }); +}); diff --git a/src/converter/defaultConverters/autoConverter.js b/src/converter/defaultConverters/autoConverter.js new file mode 100644 index 0000000..1777377 --- /dev/null +++ b/src/converter/defaultConverters/autoConverter.js @@ -0,0 +1,13 @@ +import DataConverter from '../model/dataConverter'; +import AUTO from '../utils/auto-resolver'; +import DataFormat from '../../enums/data-format'; + +export default class AutoDataConverter extends DataConverter { + constructor() { + super(DataFormat.AUTO); + } + + convert(data, schema, options) { + return AUTO(data, schema, options); + } +} diff --git a/src/converter/defaultConverters/dsvArrayConverter.js b/src/converter/defaultConverters/dsvArrayConverter.js new file mode 100644 index 0000000..9dd2db1 --- /dev/null +++ b/src/converter/defaultConverters/dsvArrayConverter.js @@ -0,0 +1,13 @@ +import DataConverter from '../model/dataConverter'; +import DSVArr from '../utils/dsv-arr'; +import DataFormat from '../../enums/data-format'; + +export default class DSVArrayConverter extends DataConverter { + constructor() { + super(DataFormat.DSV_ARR); + } + + convert(data, schema, options) { + return DSVArr(data, schema, options); + } +} diff --git a/src/converter/defaultConverters/dsvStringConverter.js b/src/converter/defaultConverters/dsvStringConverter.js new file mode 100644 index 0000000..fa052a2 --- /dev/null +++ b/src/converter/defaultConverters/dsvStringConverter.js @@ -0,0 +1,13 @@ +import DataConverter from '../model/dataConverter'; +import DSVStr from '../utils/dsv-str'; +import DataFormat from '../../enums/data-format'; + +export default class DSVStringConverter extends DataConverter { + constructor() { + super(DataFormat.DSV_STR); + } + + convert(data, schema, options) { + return DSVStr(data, schema, options); + } +} diff --git a/src/converter/defaultConverters/index.js b/src/converter/defaultConverters/index.js new file mode 100644 index 0000000..fdcf21f --- /dev/null +++ b/src/converter/defaultConverters/index.js @@ -0,0 +1,4 @@ +export { default as DSVStringConverter } from './dsvStringConverter'; +export { default as JSONConverter } from './jsonConverter'; +export { default as DSVArrayConverter } from './dsvArrayConverter'; +export { default as AutoDataConverter } from './autoConverter'; diff --git a/src/converter/defaultConverters/jsonConverter.js 
b/src/converter/defaultConverters/jsonConverter.js new file mode 100644 index 0000000..e4b1d2e --- /dev/null +++ b/src/converter/defaultConverters/jsonConverter.js @@ -0,0 +1,13 @@ +import DataConverter from '../model/dataConverter'; +import FlatJSON from '../utils/flat-json'; +import DataFormat from '../../enums/data-format'; + +export default class JSONConverter extends DataConverter { + constructor() { + super(DataFormat.FLAT_JSON); + } + + convert(data, schema, options) { + return FlatJSON(data, schema, options); + } +} diff --git a/src/converter/defaultConverters/jsonConverter.spec.js b/src/converter/defaultConverters/jsonConverter.spec.js new file mode 100644 index 0000000..c398456 --- /dev/null +++ b/src/converter/defaultConverters/jsonConverter.spec.js @@ -0,0 +1,56 @@ +/* global describe, it ,beforeEach */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import JSONConverter from './jsonConverter'; + +describe('JSON Converter', () => { + let data; + let converter = new JSONConverter(); + beforeEach(() => { + data = [ + { + a: 1, + b: 2, + c: 3 + }, + { + a: 4, + b: 5, + c: 6 + }, + { + a: 7, + b: 8, + c: 9 + } + ]; + }); + + describe('#JSON', () => { + it('should convert to JSON data', () => { + const schema = [ + { + name: 'a', + type: 'measure', + subtype: 'continuous' + }, + { + name: 'b', + type: 'measure', + subtype: 'continuous' + }, + { + name: 'c', + type: 'measure', + subtype: 'continuous' + } + ]; + + const parsedData = converter.convert(data, schema); + const expected = [['a', 'b', 'c'], [[1, 4, 7], [2, 5, 8], [3, 6, 9]]]; + + expect(parsedData).to.deep.equal(expected); + }); + }); +}); diff --git a/src/converter/index.js b/src/converter/index.js index c5cc212..135b30a 100644 --- a/src/converter/index.js +++ b/src/converter/index.js @@ -1,4 +1,4 @@ -export { default as DSVArr } from './dsv-arr'; -export { default as DSVStr } from './dsv-str'; -export { default as FlatJSON } from './flat-json'; -export { default as Auto } from './auto-resolver'; +import converterStore from './dataConverterStore'; +import DataConverter from './model/dataConverter'; + +export { DataConverter, converterStore }; diff --git a/src/converter/model/dataConverter.js b/src/converter/model/dataConverter.js new file mode 100644 index 0000000..0aa9385 --- /dev/null +++ b/src/converter/model/dataConverter.js @@ -0,0 +1,17 @@ +/** + * Interface for all data converters + */ +export default class DataConverter { + constructor(type) { + this._type = type; + } + + get type() { + return this._type; + } + + convert() { + throw new Error('Convert method not implemented.'); + } + +} diff --git a/src/converter/model/dataConverter.spec.js b/src/converter/model/dataConverter.spec.js new file mode 100644 index 0000000..cabd996 --- /dev/null +++ b/src/converter/model/dataConverter.spec.js @@ -0,0 +1,11 @@ +/* global describe, it */ +/* eslint-disable no-unused-expressions */ + +import { expect } from 'chai'; +import DataConverter from './dataConverter'; + +describe('#DataConverterModel', () => { + it('should throw error', () => { + expect(new DataConverter().convert).to.throw(Error, 'Convert method not implemented'); + }); +}); diff --git a/src/converter/auto-resolver.js b/src/converter/utils/auto-resolver.js similarity index 93% rename from src/converter/auto-resolver.js rename to src/converter/utils/auto-resolver.js index 2dc6ee2..7453858 100644 --- a/src/converter/auto-resolver.js +++ b/src/converter/utils/auto-resolver.js @@ -1,7 +1,7 @@ import FlatJSON from './flat-json'; import 
DSVArr from './dsv-arr'; import DSVStr from './dsv-str'; -import { detectDataFormat } from '../utils'; +import { detectDataFormat } from '../../utils'; /** * Parses the input data and detect the format automatically. diff --git a/src/converter/auto-resolver.spec.js b/src/converter/utils/auto-resolver.spec.js similarity index 100% rename from src/converter/auto-resolver.spec.js rename to src/converter/utils/auto-resolver.spec.js diff --git a/src/converter/dsv-arr.js b/src/converter/utils/dsv-arr.js similarity index 97% rename from src/converter/dsv-arr.js rename to src/converter/utils/dsv-arr.js index 9366fa5..130096a 100644 --- a/src/converter/dsv-arr.js +++ b/src/converter/utils/dsv-arr.js @@ -1,4 +1,4 @@ -import { columnMajor } from '../utils'; +import { columnMajor } from '../../utils'; /** * Parses and converts data formatted in DSV array to a manageable internal format. diff --git a/src/converter/dsv-arr.spec.js b/src/converter/utils/dsv-arr.spec.js similarity index 100% rename from src/converter/dsv-arr.spec.js rename to src/converter/utils/dsv-arr.spec.js diff --git a/src/converter/dsv-str.js b/src/converter/utils/dsv-str.js similarity index 100% rename from src/converter/dsv-str.js rename to src/converter/utils/dsv-str.js diff --git a/src/converter/dsv-str.spec.js b/src/converter/utils/dsv-str.spec.js similarity index 100% rename from src/converter/dsv-str.spec.js rename to src/converter/utils/dsv-str.spec.js diff --git a/src/converter/flat-json.js b/src/converter/utils/flat-json.js similarity index 96% rename from src/converter/flat-json.js rename to src/converter/utils/flat-json.js index 14f0bc8..c76ef9d 100644 --- a/src/converter/flat-json.js +++ b/src/converter/utils/flat-json.js @@ -1,4 +1,4 @@ -import { columnMajor } from '../utils'; +import { columnMajor } from '../../utils'; /** * Parses and converts data formatted in JSON to a manageable internal format. 
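// For reference, every converter in this directory resolves to the same internal shape: a
// header array plus column-major data, as asserted in jsonConverter.spec.js above. A sketch
// using the registered FlatJSON converter:
const jsonConverter = DataModel.Converters.get(DataModel.DataFormat.FLAT_JSON);
const [header, columns] = jsonConverter.convert(
    [{ a: 1, b: 2 }, { a: 4, b: 5 }],
    [{ name: 'a', type: 'measure' }, { name: 'b', type: 'measure' }]
);
// header -> ['a', 'b']; columns -> [[1, 4], [2, 5]]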
diff --git a/src/converter/flat-json.spec.js b/src/converter/utils/flat-json.spec.js similarity index 100% rename from src/converter/flat-json.spec.js rename to src/converter/utils/flat-json.spec.js diff --git a/src/create-fields.spec.js b/src/create-fields.spec.js index b0ea799..c7a4fbd 100644 --- a/src/create-fields.spec.js +++ b/src/create-fields.spec.js @@ -2,16 +2,19 @@ /* eslint-disable no-unused-expressions */ import { expect } from 'chai'; -import { Categorical, Temporal, Binned, Continuous } from './fields'; +import Categorical from './fields/categorical'; +import Temporal from './fields/temporal'; +import Binned from './fields/binned'; +import Continuous from './fields/continuous'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import { MeasureSubtype, DimensionSubtype } from './enums'; +import PartialField from './fields/partial-field'; describe('Creating Field', () => { describe('#createUnitFieldFromPartial', () => { it('should return an array of correct field instances', () => { - let mockedPartialField = { - schema: { name: 'Country' } - }; + let mockedPartialField = new PartialField('Country', [], { name: 'Country' }, null); let mockedRowDiffset = '1-2'; expect(createUnitFieldFromPartial(mockedPartialField, mockedRowDiffset) instanceof Categorical).to.be.true; diff --git a/src/datamodel.js b/src/datamodel.js index 31e986e..e362e0b 100644 --- a/src/datamodel.js +++ b/src/datamodel.js @@ -25,6 +25,8 @@ import reducerStore from './utils/reducer-store'; import { createFields } from './field-creator'; import InvalidAwareTypes from './invalid-aware-types'; import Value from './value'; +import { converterStore } from './converter'; +import { fieldRegistry } from './fields'; /** * DataModel is an in-browser representation of tabular data. It supports @@ -94,6 +96,20 @@ class DataModel extends Relation { return reducerStore; } + /** + * Converters are functions that transform data in various formats to a DataModel-consumable format. 
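+     * For example, the registered FlatJSON converter turns an array of row objects into a header array plus column-major data.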
+ */ + static get Converters() { + return converterStore; + } + + /** + * Register new type of fields + */ + static get FieldTypes() { + return fieldRegistry; + } + /** * Configure null, undefined, invalid values in the source data * @@ -558,13 +574,15 @@ class DataModel extends Relation { }; addToNameSpace && addToPropNamespace(propagationNameSpace, config, this); - propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, sourceId: propagationSourceId }, + propagateToAllDataModels(identifiers, rootModels, { propagationNameSpace, + sourceId: propagationSourceId, + propagationSource: this }, Object.assign({ payload }, config)); if (isMutableAction) { - propagateImmutableActions(propagationNameSpace, rootModels, { + propagateImmutableActions(propagationNameSpace, rootModel, { config, propConfig }, this); diff --git a/src/enums/dimension-subtype.js b/src/enums/dimension-subtype.js index 70c4da3..9a9f17b 100644 --- a/src/enums/dimension-subtype.js +++ b/src/enums/dimension-subtype.js @@ -7,7 +7,6 @@ const DimensionSubtype = { CATEGORICAL: 'categorical', TEMPORAL: 'temporal', - GEO: 'geo', BINNED: 'binned' }; diff --git a/src/export.js b/src/export.js index ed7d7c6..7767443 100644 --- a/src/export.js +++ b/src/export.js @@ -13,14 +13,17 @@ import { leftOuterJoin, rightOuterJoin, fullOuterJoin, - union + union, + rowDiffsetIterator } from './operator'; import * as Stats from './stats'; import * as enums from './enums'; +import { DataConverter } from './converter'; import { DateTimeFormatter } from './utils'; import { DataFormat, FilteringMode, DM_DERIVATIVES } from './constants'; import InvalidAwareTypes from './invalid-aware-types'; import pkg from '../package.json'; +import * as FieldsUtility from './fields'; const Operators = { compose, @@ -36,7 +39,8 @@ const Operators = { leftOuterJoin, rightOuterJoin, fullOuterJoin, - union + union, + rowDiffsetIterator }; const version = pkg.version; @@ -48,7 +52,9 @@ Object.assign(DataModel, { DataFormat, FilteringMode, InvalidAwareTypes, - version + version, + DataConverter, + FieldsUtility }, enums); export default DataModel; diff --git a/src/field-creator.js b/src/field-creator.js index 6d69eac..06f6fa6 100644 --- a/src/field-creator.js +++ b/src/field-creator.js @@ -1,15 +1,5 @@ import { FieldType, DimensionSubtype, MeasureSubtype } from './enums'; -import { - Categorical, - Temporal, - Binned, - Continuous, - CategoricalParser, - TemporalParser, - BinnedParser, - ContinuousParser, - PartialField -} from './fields'; +import { fieldRegistry } from './fields'; /** * Creates a field instance according to the provided data and schema. 
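// A sketch of the extension point this registry enables (GeoDimension is hypothetical and
// not part of this changeset): a custom dimension subtype can be registered so that the
// rewritten createUnitField() below resolves it from fieldRegistry instead of the old switch.
import Dimension from './fields/dimension';
import CategoricalParser from './fields/parsers/categorical-parser';
import { fieldRegistry } from './fields';

class GeoDimension extends Dimension {
    static parser () {
        return new CategoricalParser(); // reusing an existing parser keeps the sketch small
    }
}

fieldRegistry.registerFieldType('geo', GeoDimension);
// schema entries with { type: 'dimension', subtype: 'geo' } would now build GeoDimension fields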
@@ -20,37 +10,24 @@ import { */ function createUnitField(data, schema) { data = data || []; - let partialField; - switch (schema.type) { - case FieldType.MEASURE: - switch (schema.subtype) { - case MeasureSubtype.CONTINUOUS: - partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); - return new Continuous(partialField, `0-${data.length - 1}`); - default: - partialField = new PartialField(schema.name, data, schema, new ContinuousParser()); - return new Continuous(partialField, `0-${data.length - 1}`); - } - case FieldType.DIMENSION: - switch (schema.subtype) { - case DimensionSubtype.CATEGORICAL: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); - case DimensionSubtype.TEMPORAL: - partialField = new PartialField(schema.name, data, schema, new TemporalParser(schema)); - return new Temporal(partialField, `0-${data.length - 1}`); - case DimensionSubtype.BINNED: - partialField = new PartialField(schema.name, data, schema, new BinnedParser()); - return new Binned(partialField, `0-${data.length - 1}`); - default: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); - } - default: - partialField = new PartialField(schema.name, data, schema, new CategoricalParser()); - return new Categorical(partialField, `0-${data.length - 1}`); + if (fieldRegistry.has(schema.subtype)) { + return fieldRegistry.get(schema.subtype) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build(); } + return fieldRegistry + .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) + .BUILDER + .fieldName(schema.name) + .schema(schema) + .data(data) + .rowDiffset(`0-${data.length - 1}`) + .build(); } @@ -64,28 +41,19 @@ function createUnitField(data, schema) { export function createUnitFieldFromPartial(partialField, rowDiffset) { const { schema } = partialField; - switch (schema.type) { - case FieldType.MEASURE: - switch (schema.subtype) { - case MeasureSubtype.CONTINUOUS: - return new Continuous(partialField, rowDiffset); - default: - return new Continuous(partialField, rowDiffset); - } - case FieldType.DIMENSION: - switch (schema.subtype) { - case DimensionSubtype.CATEGORICAL: - return new Categorical(partialField, rowDiffset); - case DimensionSubtype.TEMPORAL: - return new Temporal(partialField, rowDiffset); - case DimensionSubtype.BINNED: - return new Binned(partialField, rowDiffset); - default: - return new Categorical(partialField, rowDiffset); - } - default: - return new Categorical(partialField, rowDiffset); + if (fieldRegistry.has(schema.subtype)) { + return fieldRegistry.get(schema.subtype) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build(); } + return fieldRegistry + .get(schema.type === FieldType.MEASURE ? MeasureSubtype.CONTINUOUS : DimensionSubtype.CATEGORICAL) + .BUILDER + .partialField(partialField) + .rowDiffset(rowDiffset) + .build(); } /** diff --git a/src/fields/binned/index.js b/src/fields/binned/index.js index 04d1fbf..8438416 100644 --- a/src/fields/binned/index.js +++ b/src/fields/binned/index.js @@ -1,4 +1,5 @@ import Dimension from '../dimension'; +import BinnedParser from '../parsers/binned-parser'; /** * Represents binned field subtype. 
@@ -29,4 +30,8 @@ export default class Binned extends Dimension { bins () { return this.partialField.schema.bins; } + + static parser() { + return new BinnedParser(); + } } diff --git a/src/fields/categorical/index.js b/src/fields/categorical/index.js index 267cbfc..e62f50d 100644 --- a/src/fields/categorical/index.js +++ b/src/fields/categorical/index.js @@ -1,6 +1,7 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { DimensionSubtype } from '../../enums'; import Dimension from '../dimension'; +import CategoricalParser from '../parsers/categorical-parser'; /** * Represents categorical field subtype. * @@ -41,4 +42,8 @@ export default class Categorical extends Dimension { }); return domain; } + + static parser() { + return new CategoricalParser(); + } } diff --git a/src/fields/continuous/index.js b/src/fields/continuous/index.js index d0f68b3..d83ca6d 100644 --- a/src/fields/continuous/index.js +++ b/src/fields/continuous/index.js @@ -1,7 +1,7 @@ -import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import { MeasureSubtype } from '../../enums'; import Measure from '../measure'; -import InvalidAwareTypes from '../../invalid-aware-types'; +import ContinuousParser from '../parsers/continuous-parser'; +import { calculateContinuousDomain } from '../helper'; /** * Represents continuous field subtype. @@ -30,24 +30,10 @@ export default class Continuous extends Measure { * @return {Array} Returns the min and max values. */ calculateDataDomain () { - let min = Number.POSITIVE_INFINITY; - let max = Number.NEGATIVE_INFINITY; - - // here don't use this.data() as the iteration will be occurred two times on same data. - rowDiffsetIterator(this.rowDiffset, (i) => { - const datum = this.partialField.data[i]; - if (datum instanceof InvalidAwareTypes) { - return; - } - - if (datum < min) { - min = datum; - } - if (datum > max) { - max = datum; - } - }); + return calculateContinuousDomain(this.partialField.data, this.rowDiffset); + } - return [min, max]; + static parser() { + return new ContinuousParser(); } } diff --git a/src/fields/field-registry.js b/src/fields/field-registry.js new file mode 100644 index 0000000..984f492 --- /dev/null +++ b/src/fields/field-registry.js @@ -0,0 +1,46 @@ +import Categorical from './categorical'; +import Temporal from './temporal'; +import Binned from './binned'; +import Continuous from './continuous'; +import { DimensionSubtype, MeasureSubtype } from '../enums'; + + +class FieldTypeRegistry { + constructor() { + this._fieldType = new Map(); + } + + registerFieldType(subtype, dimension) { + this._fieldType.set(subtype, dimension); + return this; + } + + has(type) { + return this._fieldType.has(type); + } + + get(type) { + return this._fieldType.get(type); + } +} + +const registerDefaultFields = (store) => { + store + .registerFieldType(DimensionSubtype.CATEGORICAL, Categorical) + .registerFieldType(DimensionSubtype.TEMPORAL, Temporal) + .registerFieldType(DimensionSubtype.BINNED, Binned) + .registerFieldType(MeasureSubtype.CONTINUOUS, Continuous); +}; + +const fieldRegistry = (function () { + let store = null; + function getStore () { + store = new FieldTypeRegistry(); + registerDefaultFields(store); + return store; + } + return store || getStore(); +}()); + +export default fieldRegistry; + diff --git a/src/fields/field-registry.spec.js b/src/fields/field-registry.spec.js new file mode 100644 index 0000000..e69de29 diff --git a/src/fields/field/index.js b/src/fields/field/index.js index 267afb0..beb7294 100644 --- 
a/src/fields/field/index.js +++ b/src/fields/field/index.js @@ -1,4 +1,5 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; +import PartialField from '../partial-field'; /** * In {@link DataModel}, every tabular data consists of column, a column is stored as field. @@ -32,6 +33,10 @@ export default class Field { this.rowDiffset = rowDiffset; } + static parser() { + throw new Error('Not yet implemented'); + } + /** * Generates the field type specific domain. * @@ -125,4 +130,47 @@ export default class Field { formattedData () { throw new Error('Not yet implemented'); } + + static get BUILDER() { + const builder = { + _params: {}, + _context: this, + fieldName(name) { + this._params.name = name; + return this; + }, + schema(schema) { + this._params.schema = schema; + return this; + }, + data(data) { + this._params.data = data; + return this; + }, + partialField(partialField) { + this._params.partialField = partialField; + return this; + }, + rowDiffset(rowDiffset) { + this._params.rowDiffset = rowDiffset; + return this; + }, + build() { + let partialField = null; + if (this._params.partialField instanceof PartialField) { + partialField = this._params.partialField; + } else if (this._params.schema && this._params.data) { + partialField = new PartialField(this._params.name, + this._params.data, + this._params.schema, + this._context.parser()); + } + else { + throw new Error('Invalid Field parameters'); + } + return new this._context(partialField, this._params.rowDiffset); + } + }; + return builder; + } } diff --git a/src/fields/helper.js b/src/fields/helper.js new file mode 100644 index 0000000..9ed11eb --- /dev/null +++ b/src/fields/helper.js @@ -0,0 +1,24 @@ +import { rowDiffsetIterator } from '../operator/row-diffset-iterator'; +import InvalidAwareTypes from '../invalid-aware-types'; + +export const calculateContinuousDomain = (data, rowDiffset) => { + let min = Number.POSITIVE_INFINITY; + let max = Number.NEGATIVE_INFINITY; + + // here don't use this.data() as the iteration will be occurred two times on same data. 
+ rowDiffsetIterator(rowDiffset, (i) => { + const datum = data[i]; + if (datum instanceof InvalidAwareTypes) { + return; + } + + if (datum < min) { + min = datum; + } + if (datum > max) { + max = datum; + } + }); + + return [min, max]; +}; diff --git a/src/fields/index.js b/src/fields/index.js index fbe76bf..c62b24f 100644 --- a/src/fields/index.js +++ b/src/fields/index.js @@ -1,13 +1,5 @@ -export { default as Field } from './field'; export { default as Dimension } from './dimension'; -export { default as Categorical } from './categorical'; -export { default as Temporal } from './temporal'; -export { default as Binned } from './binned'; export { default as Measure } from './measure'; -export { default as Continuous } from './continuous'; export { default as FieldParser } from './parsers/field-parser'; -export { default as CategoricalParser } from './parsers/categorical-parser'; -export { default as TemporalParser } from './parsers/temporal-parser'; -export { default as BinnedParser } from './parsers/binned-parser'; -export { default as ContinuousParser } from './parsers/continuous-parser'; -export { default as PartialField } from './partial-field'; +export { default as fieldRegistry } from './field-registry'; +export { columnMajor } from '../utils'; diff --git a/src/fields/parsers/temporal-parser/index.js b/src/fields/parsers/temporal-parser/index.js index 0c02894..81fef24 100644 --- a/src/fields/parsers/temporal-parser/index.js +++ b/src/fields/parsers/temporal-parser/index.js @@ -10,17 +10,6 @@ import InvalidAwareTypes from '../../../invalid-aware-types'; * @implements {FieldParser} */ export default class TemporalParser extends FieldParser { - /** - * Initialize a new instance. - * - * @public - * @param {Object} schema - The schema object for the corresponding field. - */ - constructor (schema) { - super(); - this.schema = schema; - this._dtf = new DateTimeFormatter(this.schema.format); - } /** * Parses a single value of a field and returns the millisecond value. @@ -29,9 +18,12 @@ export default class TemporalParser extends FieldParser { * @param {string|number} val - The value of the field. * @return {number} Returns the millisecond value. */ - parse (val) { + parse (val, { format }) { let result; // check if invalid date value + if (!this._dtf) { + this._dtf = new DateTimeFormatter(format); + } if (!InvalidAwareTypes.isInvalid(val)) { let nativeDate = this._dtf.getNativeDate(val); result = nativeDate ? 
nativeDate.getTime() : InvalidAwareTypes.NA; diff --git a/src/fields/parsers/temporal-parser/index.spec.js b/src/fields/parsers/temporal-parser/index.spec.js index 5805cf6..728eeaf 100644 --- a/src/fields/parsers/temporal-parser/index.spec.js +++ b/src/fields/parsers/temporal-parser/index.spec.js @@ -17,37 +17,37 @@ describe('TemporalParser', () => { let temParser; beforeEach(() => { - temParser = new TemporalParser(schema); + temParser = new TemporalParser(); }); describe('#parse', () => { it('should return milliseconds for the formatted value', () => { const dateStr = '2017-03-01'; const expectedTs = new DateTimeFormatter(schema.format).getNativeDate(dateStr).getTime(); - expect(temParser.parse(dateStr)).to.equal(expectedTs); + expect(temParser.parse(dateStr, { format: schema.format })).to.equal(expectedTs); }); it('should bypass to Date API when format is not present', () => { const val = 1540629018697; - temParser = new TemporalParser(Object.assign({}, schema, { format: undefined })); - expect(temParser.parse(val)).to.equal(+new Date(val)); + temParser = new TemporalParser(); + expect(temParser.parse(val, { format: undefined })).to.equal(+new Date(val)); }); it('should return default invalid type for invalid value', () => { - expect(temParser.parse(null)).to.eql(DataModel.InvalidAwareTypes.NULL); - expect(temParser.parse(undefined)).to.equal(DataModel.InvalidAwareTypes.NA); - expect(temParser.parse('abcd')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse(null, { format: schema.format })).to.eql(DataModel.InvalidAwareTypes.NULL); + expect(temParser.parse(undefined, { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd', { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); }); it('should return valid date for edge case', () => { - expect(temParser.parse('')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('', { format: schema.format })).to.equal(DataModel.InvalidAwareTypes.NA); - temParser = new TemporalParser(Object.assign({}, schema, { format: '%Y' })); - expect(temParser.parse('1998')).to.equal(new Date(1998, 0, 1).getTime()); + temParser = new TemporalParser(); + expect(temParser.parse('1998', { format: '%Y' })).to.equal(new Date(1998, 0, 1).getTime()); - temParser = new TemporalParser(Object.assign({}, schema, { format: '%y' })); - expect(temParser.parse('98')).to.equal(new Date(1998, 0, 1).getTime()); + temParser = new TemporalParser(); + expect(temParser.parse('98', { format: '%y' })).to.equal(new Date(1998, 0, 1).getTime()); - expect(temParser.parse('abcd')).to.equal(DataModel.InvalidAwareTypes.NA); + expect(temParser.parse('abcd', { format: '%y' })).to.equal(DataModel.InvalidAwareTypes.NA); }); }); }); diff --git a/src/fields/partial-field/index.js b/src/fields/partial-field/index.js index 2795f92..a31457c 100644 --- a/src/fields/partial-field/index.js +++ b/src/fields/partial-field/index.js @@ -31,6 +31,6 @@ export default class PartialField { * @return {Array} Returns the sanitized data. 
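 * (For instance, with a temporal schema whose format is '%Y-%m-%d', each raw
 * value is passed through TemporalParser#parse and stored as a millisecond
 * timestamp. Note that the parser builds its DateTimeFormatter lazily on the
 * first parse call and reuses it afterwards, so a PartialField is effectively
 * tied to a single format.)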
*/ _sanitize (data) { - return data.map(datum => this.parser.parse(datum)); + return data.map(datum => this.parser.parse(datum, { format: this.schema.format })); } } diff --git a/src/fields/partial-field/index.spec.js b/src/fields/partial-field/index.spec.js index 7f286d7..49edf06 100644 --- a/src/fields/partial-field/index.spec.js +++ b/src/fields/partial-field/index.spec.js @@ -19,7 +19,7 @@ describe('PartialField', () => { let temParser; beforeEach(() => { - temParser = new TemporalParser(schema); + temParser = new TemporalParser(); partField = new PartialField(schema.name, data, schema, temParser); }); @@ -31,7 +31,7 @@ describe('PartialField', () => { }); it('should sanitize the input data before use', () => { - const expected = data.map(d => temParser.parse(d)); + const expected = data.map(d => temParser.parse(d, { format: schema.format })); expect(partField.data).to.eql(expected); }); }); diff --git a/src/fields/temporal/index.js b/src/fields/temporal/index.js index 0b3c540..4fcdd2b 100644 --- a/src/fields/temporal/index.js +++ b/src/fields/temporal/index.js @@ -2,6 +2,8 @@ import { rowDiffsetIterator } from '../../operator/row-diffset-iterator'; import Dimension from '../dimension'; import { DateTimeFormatter } from '../../utils'; import InvalidAwareTypes from '../../invalid-aware-types'; +import TemporalParser from '../parsers/temporal-parser'; +import { calculateContinuousDomain } from '../helper'; /** * Represents temporal field subtype. @@ -32,20 +34,7 @@ export default class Temporal extends Dimension { - * @return {Array} Returns the unique values. + * @return {Array} Returns the continuous domain as a [min, max] pair. */ calculateDataDomain () { - const hash = new Set(); - const domain = []; - - // here don't use this.data() as the iteration will be - // occurred two times on same data. - rowDiffsetIterator(this.rowDiffset, (i) => { - const datum = this.partialField.data[i]; - if (!hash.has(datum)) { - hash.add(datum); - domain.push(datum); - } - }); - - return domain; + return calculateContinuousDomain(this.partialField.data, this.rowDiffset); } @@ -121,5 +110,9 @@ export default class Temporal extends Dimension { }); return data; } + + static parser() { + return new TemporalParser(); + } } diff --git a/src/fields/temporal/index.spec.js b/src/fields/temporal/index.spec.js index 86afd4d..f66bf80 100644 --- a/src/fields/temporal/index.spec.js +++ b/src/fields/temporal/index.spec.js @@ -34,7 +34,6 @@ describe('Temporal', () => { it('should return the field domain', () => { const expected = [ new Date(2017, 3 - 1, 1).getTime(), - new Date(2017, 3 - 1, 2).getTime(), new Date(2017, 3 - 1, 3).getTime() ]; expect(tempField.calculateDataDomain()).to.eql(expected); @@ -49,9 +48,7 @@ describe('Temporal', () => { const expected = [ new Date(2017, 3 - 1, 2).getTime(), - new Date(2017, 3 - 1, 3).getTime(), - new Date(2019, 11 - 1, 7).getTime(), - DataModel.InvalidAwareTypes.NULL + new Date(2019, 11 - 1, 7).getTime() ]; expect(tempField.calculateDataDomain()).to.eql(expected); }); diff --git a/src/helper.js b/src/helper.js index 2da2f26..ef85a76 100644 --- a/src/helper.js +++ b/src/helper.js @@ -4,10 +4,11 @@ import Value from './value'; import { rowDiffsetIterator } from './operator'; -import { DM_DERIVATIVES, LOGICAL_OPERATORS } from './constants'; +import { DM_DERIVATIVES, LOGICAL_OPERATORS, ROW_ID } from './constants'; import { createFields, createUnitFieldFromPartial } from './field-creator'; import defaultConfig from './default-config'; -import * as converter from './converter'; +import { converterStore } from './converter'; +import { fieldRegistry } from
'./fields'; import { extend2, detectDataFormat } from './utils'; /** @@ -144,12 +145,10 @@ export const selectHelper = (clonedDm, selectFn, config, sourceDm, iterator) => let cloneProvider = () => sourceDm.detachedRoot(); const { mode } = config; const rowDiffset = clonedDm._rowDiffset; - const fields = clonedDm.getPartialFieldspace().fields; - const formattedFieldsData = fields.map(field => field.formattedData()); - const rawFieldsData = fields.map(field => field.data()); + const cachedValueObjects = clonedDm._partialFieldspace._cachedValueObjects; const selectorHelperFn = index => selectFn( - prepareSelectionData(fields, formattedFieldsData, rawFieldsData, index), + cachedValueObjects[index], index, cloneProvider, cachedStore @@ -172,72 +171,97 @@ export const cloneWithAllFields = (model) => { return clonedDm; }; -const getKey = (arr, data, fn) => { - let key = fn(arr, data, 0); +const getKey = (arr, data, fn, rowId) => { + let key = fn(arr, data, 0, rowId); for (let i = 1, len = arr.length; i < len; i++) { - key = `${key},${fn(arr, data, i)}`; + key = `${key},${fn(arr, data, i, rowId)}`; } return key; }; +const keyFn = (arr, fields, idx, rowId) => { + const field = arr[idx]; + const val = field === ROW_ID ? rowId : fields[field].internalValue; + return val; +}; + +const domainChecker = (val, domain) => { + const domainArr = domain[0] instanceof Array ? domain : [domain]; + return domainArr.some(dom => val >= dom[0] && val <= dom[1]); +}; + +const boundsChecker = { + [MeasureSubtype.CONTINUOUS]: domainChecker, + [DimensionSubtype.TEMPORAL]: domainChecker +}; + +const isWithinDomain = (value, domain, fieldType) => boundsChecker[fieldType](value, domain); + export const filterPropagationModel = (model, propModels, config = {}) => { let fns = []; const operation = config.operation || LOGICAL_OPERATORS.AND; - const filterByMeasure = config.filterByMeasure || false; - const clonedModel = cloneWithAllFields(model); + const { filterByDim = true, filterByMeasure = false, clone = true } = config; + const clonedModel = clone ? 
cloneWithAllFields(model) : model; const modelFieldsConfig = clonedModel.getFieldsConfig(); if (!propModels.length) { fns = [() => false]; } else { - fns = propModels.map(propModel => ((dataModel) => { - let keyFn; - const dataObj = dataModel.getData(); - const fieldsConfig = dataModel.getFieldsConfig(); - const dimensions = Object.keys(dataModel.getFieldspace().getDimension()) - .filter(d => d in modelFieldsConfig); - const dLen = dimensions.length; - const indices = dimensions.map(d => - fieldsConfig[d].index); - const measures = Object.keys(dataModel.getFieldspace().getMeasure()) - .filter(d => d in modelFieldsConfig); - const fieldsSpace = dataModel.getFieldspace().fieldsObj(); - const data = dataObj.data; - const domain = measures.reduce((acc, v) => { - acc[v] = fieldsSpace[v].domain(); - return acc; + fns = propModels.map(propModel => (({ criteria = {} }) => { + const { identifiers = [[], []], range } = criteria; + let [fieldNames = [], values = []] = identifiers; + const indices = fieldNames.reduce((map, name, i) => { + map[name] = i; + return map; }, {}); + fieldNames = fieldNames.filter(field => (field in modelFieldsConfig && + modelFieldsConfig[field].def.type === FieldType.DIMENSION) || field === ROW_ID); + const dLen = fieldNames.length; const valuesMap = {}; - keyFn = (arr, row, idx) => row[arr[idx]]; if (dLen) { - data.forEach((row) => { - const key = getKey(indices, row, keyFn); + for (let i = 1, len = identifiers.length; i < len; i++) { + const row = identifiers[i]; + const key = `${fieldNames.map((field) => { + const idx = indices[field]; + return row[idx]; + })}`; valuesMap[key] = 1; - }); + } + } + let rangeKeys = Object.keys(range || {}).filter(field => field in modelFieldsConfig); + const hasData = values.length || rangeKeys.length; + + if (!filterByMeasure) { + rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.MEASURE); } - keyFn = (arr, fields, idx) => fields[arr[idx]].internalValue; - return data.length ? (fields) => { - const present = dLen ? valuesMap[getKey(dimensions, fields, keyFn)] : true; + if (!filterByDim) { + rangeKeys = rangeKeys.filter(field => modelFieldsConfig[field].def.type !== FieldType.DIMENSION); + } - if (filterByMeasure) { - return measures.every(field => fields[field].internalValue >= domain[field][0] && - fields[field].internalValue <= domain[field][1]) && present; + return hasData ? (fields, i) => { + let present = true; + if (filterByDim) { + present = dLen ? 
valuesMap[getKey(fieldNames, fields, keyFn, i)] : true; } - return present; + + return rangeKeys.every((field) => { + const val = fields[field].internalValue; + return isWithinDomain(val, range[field], modelFieldsConfig[field].def.subtype); + }) && present; } : () => false; })(propModel)); } let filteredModel; if (operation === LOGICAL_OPERATORS.AND) { - filteredModel = clonedModel.select(fields => fns.every(fn => fn(fields)), { + filteredModel = clonedModel.select((fields, i) => fns.every(fn => fn(fields, i)), { saveChild: false }); } else { - filteredModel = clonedModel.select(fields => fns.some(fn => fn(fields)), { + filteredModel = clonedModel.select((fields, i) => fns.some(fn => fn(fields, i)), { saveChild: false }); } @@ -374,27 +398,12 @@ export const sanitizeUnitSchema = (unitSchema) => { }; export const validateUnitSchema = (unitSchema) => { - const supportedMeasureSubTypes = [MeasureSubtype.CONTINUOUS]; - const supportedDimSubTypes = [ - DimensionSubtype.CATEGORICAL, - DimensionSubtype.BINNED, - DimensionSubtype.TEMPORAL, - DimensionSubtype.GEO - ]; const { type, subtype, name } = unitSchema; - - switch (type) { - case FieldType.DIMENSION: - if (supportedDimSubTypes.indexOf(subtype) === -1) { - throw new Error(`DataModel doesn't support dimension field subtype ${subtype} used for ${name} field`); - } - break; - case FieldType.MEASURE: - if (supportedMeasureSubTypes.indexOf(subtype) === -1) { + if (type === FieldType.DIMENSION || type === FieldType.MEASURE) { + if (!fieldRegistry.has(subtype)) { - throw new Error(`DataModel doesn't support measure field subtype ${subtype} used for ${name} field`); + throw new Error(`DataModel doesn't support field subtype ${subtype} used for ${name} field`); } - break; - default: + } else { throw new Error(`DataModel doesn't support field type ${type} used for ${name} field`); } }; @@ -420,13 +429,14 @@ export const resolveFieldName = (schema, dataHeader) => { export const updateData = (relation, data, schema, options) => { schema = sanitizeAndValidateSchema(schema); options = Object.assign(Object.assign({}, defaultConfig), options); - const converterFn = converter[options.dataFormat]; + const converter = converterStore.get(options.dataFormat); - if (!(converterFn && typeof converterFn === 'function')) { + if (!converter) { - throw new Error(`No converter function found for ${options.dataFormat} format`); + throw new Error(`No converter found for ${options.dataFormat} format`); } - const [header, formattedData] = converterFn(data, schema, options); + const [header, formattedData] = converter.convert(data, schema, options); resolveFieldName(schema, header); const fieldArr = createFields(formattedData, schema, header); @@ -524,12 +534,8 @@ const getFilteredModel = (propModel, path) => { }; const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = {}) => { - const nonTraversingModel = propModelInf.nonTraversingModel; const excludeModels = propModelInf.excludeModels || []; - - if (dataModel === nonTraversingModel) { - return; - } + const criterias = propModelInf.criteria; const propagate = excludeModels.length ?
excludeModels.indexOf(dataModel) === -1 : true; @@ -537,7 +543,16 @@ const propagateIdentifiers = (dataModel, propModel, config = {}, propModelInf = const children = dataModel._children; children.forEach((child) => { - const selectionModel = applyExistingOperationOnModel(propModel, child); + const matchingCriteria = criterias.filter(val => val.groupedModel === child); + let selectionModel = applyExistingOperationOnModel(propModel, child); + + if (matchingCriteria.length) { + selectionModel = filterPropagationModel(selectionModel, matchingCriteria, { + filterByDim: false, + filterByMeasure: true, + clone: false + }); + } propagateIdentifiers(child, selectionModel, config, propModelInf); }); }; @@ -566,18 +581,28 @@ export const getPathToRootModel = (model, path = []) => { export const propagateToAllDataModels = (identifiers, rootModels, propagationInf, config) => { let criteria; - let propModel; const { propagationNameSpace, propagateToSource } = propagationInf; const propagationSourceId = propagationInf.sourceId; - const propagateInterpolatedValues = config.propagateInterpolatedValues; const filterFn = (entry) => { const filter = config.filterFn || (() => true); return filter(entry, config); }; + const addGroupedModel = ({ config: conf, model }) => { + const { criteria: crit } = conf; + let groupedModel; + + if (crit !== null && crit.fields.some(d => d.type === FieldType.MEASURE)) { + groupedModel = getRootGroupByModel(model); + } + return Object.assign({}, conf, { + groupedModel + }); + }; + let criterias = []; - if (identifiers === null && config.persistent !== true) { + if (identifiers === null) { criterias = [{ criteria: [] }]; @@ -588,7 +613,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf actionCriterias = actionCriterias.filter(d => d.config.sourceId !== propagationSourceId); } - const filteredCriteria = actionCriterias.filter(filterFn).map(action => action.config.criteria); + const filteredCriteria = actionCriterias.filter(filterFn); const excludeModels = []; @@ -600,7 +625,7 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf if (actionConf.applyOnSource === false && actionConf.action === config.action && actionConf.sourceId !== propagationSourceId) { excludeModels.push(actionInf.model); - criteria = sourceActionCriterias.filter(d => d !== actionInf).map(d => d.config.criteria); + criteria = sourceActionCriterias.filter(d => d !== actionInf).map(addGroupedModel); criteria.length && criterias.push({ criteria, models: actionInf.model, @@ -611,7 +636,11 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf } - criteria = [].concat(...[...filteredCriteria, identifiers]).filter(d => d !== null); + criteria = [].concat(...[...filteredCriteria.map(addGroupedModel), { + criteria: identifiers, + groupedModel: identifiers !== null && identifiers.fields.some(d => d.type === FieldType.MEASURE) ? 
+ getRootGroupByModel(propagationInf.propagationSource) : null + }]).filter(d => d !== null); criterias.push({ criteria, excludeModels: [...excludeModels, ...config.excludeModels || []] @@ -625,16 +654,11 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf propagationSourceId }, config); - const rootGroupByModel = rootModels.groupByModel; - if (propagateInterpolatedValues && rootGroupByModel) { - propModel = filterPropagationModel(rootGroupByModel, criteria, { - filterByMeasure: propagateInterpolatedValues - }); - propagateIdentifiers(rootGroupByModel, propModel, propConfig); - } - criterias.forEach((inf) => { - const propagationModel = filterPropagationModel(rootModel, inf.criteria); + const { criteria: crit } = inf; + const propagationModel = filterPropagationModel(rootModel, crit, { + filterByMeasure: !!crit.find(d => d.groupedModel === rootModel) + }); const path = inf.path; if (path) { @@ -643,13 +667,13 @@ export const propagateToAllDataModels = (identifiers, rootModels, propagationInf } else { propagateIdentifiers(rootModel, propagationModel, propConfig, { excludeModels: inf.excludeModels, - nonTraversingModel: propagateInterpolatedValues && rootGroupByModel + criteria: crit }); } }); }; -export const propagateImmutableActions = (propagationNameSpace, rootModels, propagationInf) => { +export const propagateImmutableActions = (propagationNameSpace, rootModel, propagationInf) => { const immutableActions = propagationNameSpace.immutableActions; for (const action in immutableActions) { @@ -660,10 +684,14 @@ export const propagateImmutableActions = (propagationNameSpace, rootModels, prop propagationInf.propConfig.filterImmutableAction(actionConf, propagationInf.config) : true; if (actionConf.sourceId !== propagationSourceId && filterImmutableAction) { const criteriaModel = actionConf.criteria; - propagateToAllDataModels(criteriaModel, rootModels, { + propagateToAllDataModels(criteriaModel, { + model: rootModel, + groupByModel: getRootGroupByModel(actionInf.model) + }, { propagationNameSpace, propagateToSource: false, - sourceId: propagationSourceId + sourceId: propagationSourceId, + propagationSource: actionInf.model }, actionConf); } } diff --git a/src/index.spec.js b/src/index.spec.js index 127ab33..55ccc7f 100644 --- a/src/index.spec.js +++ b/src/index.spec.js @@ -3,7 +3,7 @@ import { expect } from 'chai'; import { FilteringMode, DataFormat } from './enums'; -import { DM_DERIVATIVES } from './constants'; +import { DM_DERIVATIVES, ROW_ID } from './constants'; import DataModel from './index'; import pkg from '../package.json'; import InvalidAwareTypes from './invalid-aware-types'; @@ -407,7 +407,7 @@ describe('DataModel', () => { schema: [ { name: 'name', type: 'dimension', subtype: 'categorical' }, { name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' }, - { name: 'uid', type: 'identifier' } + { name: ROW_ID, type: 'dimension' } ], uids: [0, 1, 2] }; @@ -1903,30 +1903,30 @@ describe('DataModel', () => { { name: 'first', type: 'dimension' }, { name: 'second', type: 'dimension' }, ]; - const propModel = new DataModel([{ - first: 'Hey', - second: 'Jude' - }], [{ - name: 'first', - type: 'dimension' - }, { - name: 'second', - type: 'dimension' - }]); - const propModel1 = new DataModel([{ - first: 'Hey', - second: 'Jude', - count: 100 - }], [{ - name: 'first', - type: 'dimension' - }, { - name: 'second', - type: 'dimension' - }, { - name: 'count', - type: 'measure' - }]); + + const propModel = { + fields: [{ + name: 'first', + type: 
'dimension' + }, { + name: 'second', + type: 'dimension' + }], + data: [ + ['first', 'second'], + ['Hey', 'Jude'] + ] + }; + + const propModel1 = { + fields: [{ + name: 'sales', + type: 'measure' + }], + range: { + sales: [20, 25] + } + }; let dataModel; let projectionFlag = false; @@ -1935,12 +1935,14 @@ describe('DataModel', () => { let projected; let selected; let grouped; + let groupedChild; beforeEach(() => { dataModel = new DataModel(data1, schema1); projected = dataModel.project(['profit']); selected = dataModel.select(fields => fields.profit.valueOf() > 10); grouped = dataModel.groupBy(['first']); + groupedChild = grouped.select(() => true); // setup listeners projected.on('propagation', () => { projectionFlag = true; @@ -2042,6 +2044,27 @@ describe('DataModel', () => { projectionFlag && selectionFlag && groupByFlag ).to.be.true; }); + + it('Should propagate when measures are present in criteria', () => { + groupedChild.propagate(propModel1, { + action: 'highlight', + isMutableAction: true, + sourceId: 'canvas-1', + applyOnSource: false, + propagateToSource: true, + criteria: propModel1 + }, true); + + groupedChild.propagate(propModel1, { + action: 'highlight', + isMutableAction: true, + sourceId: 'canvas-2', + applyOnSource: false, + propagateToSource: true, + criteria: propModel1 + }, true); + expect(projectionFlag && selectionFlag && groupByFlag).to.be.true; + }); }); describe('#getUIDs', () => { diff --git a/src/operator/data-builder.js b/src/operator/data-builder.js index 4e4b84d..d2238fa 100644 --- a/src/operator/data-builder.js +++ b/src/operator/data-builder.js @@ -1,5 +1,7 @@ import { rowDiffsetIterator } from './row-diffset-iterator'; import { sortData } from './sort'; +import { FieldType } from '../enums'; +import { ROW_ID } from '../constants'; /** * Builds the actual data array. @@ -48,8 +50,8 @@ export function dataBuilder (fieldStore, rowDiffset, colIdentifier, sortingDetai if (addUid) { retObj.schema.push({ - name: 'uid', - type: 'identifier' + name: ROW_ID, + type: FieldType.DIMENSION }); }
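Two usage sketches to make the new APIs above concrete. First, the fluent Field.BUILDER factory: a concrete subclass such as Temporal supplies its parser through the static parser() hook, and build() assembles the PartialField whenever a schema and data are provided. The import path and the schema/data values below are illustrative, not taken from the repository:

    import Temporal from './fields/temporal';

    // Builds a Temporal field from raw values; build() creates the
    // PartialField internally via Temporal.parser() and the given schema.
    const field = Temporal.BUILDER
        .fieldName('birthday')
        .schema({ name: 'birthday', type: 'dimension', subtype: 'temporal', format: '%Y-%m-%d' })
        .data(['2017-03-01', '2017-03-02'])
        .rowDiffset('0-1')
        .build();

Second, the plain-object criteria format exercised by the new propagation spec: dimension filters travel as fields/data identifiers, while measure (and temporal) filters travel as a range map that filterPropagationModel checks through isWithinDomain. A sketch mirroring the spec fixtures (it assumes data1/schema1 contain a continuous sales field, as in the test above):

    const dm = new DataModel(data1, schema1);
    const grouped = dm.groupBy(['first']);

    // Highlight rows whose sales value lies within [20, 25].
    const rangeCriteria = {
        fields: [{ name: 'sales', type: 'measure' }],
        range: { sales: [20, 25] }
    };

    grouped.propagate(rangeCriteria, {
        action: 'highlight',
        isMutableAction: true,
        sourceId: 'canvas-1',
        applyOnSource: false,
        propagateToSource: true,
        criteria: rangeCriteria
    }, true);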