diff x/static/js/8120.85042513.chunk.js.map @ 125:49f3d3878413 draft

planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/jbrowse2 commit 5ea1f9c1eef1de76232e69aa6d34cda77d90d566
author fubar
date Sat, 05 Oct 2024 23:58:05 +0000
parents
children
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/x/static/js/8120.85042513.chunk.js.map	Sat Oct 05 23:58:05 2024 +0000
@@ -0,0 +1,1 @@
+{"version":3,"file":"static/js/8120.85042513.chunk.js","mappings":"+IAAA,MAAMA,GAMS,MAAMC,EAArB,cACE,KAAAC,QAAU,IAAIC,IACd,KAAAC,gBAAkB,IAAIC,eAyCxB,CAjCE,SAAAC,CAAUC,EAAsB,IAAIP,GAClC,GAAIQ,KAAKD,OAAOE,QACd,MAAM,IAAIC,MAAM,yCAKlBF,KAAKN,QAAQS,IAAIJ,GACbA,EAAOE,QAGTD,KAAKI,cAAcL,GACyB,mBAA5BA,EAAOM,kBACvBN,EAAOM,iBAAiB,SAAS,KAC/BL,KAAKI,cAAcL,EAAO,GAGhC,CAEA,aAAAK,CAAcL,GACZC,KAAKN,QAAQY,OAAOP,GACM,IAAtBC,KAAKN,QAAQa,MACfP,KAAKJ,gBAAgBY,OAEzB,CAEA,UAAIT,GACF,OAAOC,KAAKJ,gBAAgBG,MAC9B,CAEA,KAAAS,GACER,KAAKJ,gBAAgBY,OACvB,EChDa,MAAMC,EAArB,cACE,KAAAC,UAAY,IAAIf,GAclB,CAXE,WAAAgB,CAAYC,EAAqB,QAC/BZ,KAAKU,UAAUP,IAAIS,GACnBA,EAASZ,KAAKa,eAChB,CAEA,QAAAD,CAASE,GACPd,KAAKa,eAAiBC,EACtB,IAAK,MAAMC,KAAOf,KAAKU,UACrBK,EAAID,EAER,ECSa,MAAME,EAWnB,WAAAC,EAAY,KACVC,EAAI,MACJC,IAKA,GAAoB,mBAATD,EACT,MAAM,IAAIE,UAAU,6BAEtB,GAAqB,iBAAVD,EACT,MAAM,IAAIC,UAAU,4BAEtB,GACuB,mBAAdD,EAAME,KACQ,mBAAdF,EAAMG,KACW,mBAAjBH,EAAMb,OAEb,MAAM,IAAIc,UACR,qEAIJpB,KAAKmB,MAAQA,EACbnB,KAAKuB,aAAeL,CACtB,CAEA,uBAAOM,CAAiBC,GACtB,MAEqB,eAAnBA,EAAUC,MAGS,gBAAnBD,EAAUE,MAEY,wBAAtBF,EAAUX,SAEY,mBAAtBW,EAAUX,OAEd,CAEA,KAAAc,CAAMC,EAAaC,GACb9B,KAAKmB,MAAME,IAAIQ,KAASC,GAC1B9B,KAAKmB,MAAMb,OAAOuB,EAEtB,CAEA,IAAAX,CAAKW,EAAaE,EAAShC,EAAsBiC,GAC/C,MAAMC,EAAU,IAAIxC,EACdyC,EAAiB,IAAIzB,EAC3ByB,EAAevB,YAAYqB,GAC3B,MAAMG,EAAqB,CACzBF,QAASA,EACTG,QAASpC,KAAKuB,aAAaQ,EAAME,EAAQlC,QAASe,IAChDoB,EAAetB,SAASE,EAAQ,IAElCuB,SAAS,EACTH,iBACA,WAAIjC,GACF,OAAOD,KAAKiC,QAAQlC,OAAOE,OAC7B,GAEFkC,EAASF,QAAQnC,UAAUC,GAG3BoC,EAASF,QAAQlC,OAAOM,iBAAiB,SAAS,KAC3C8B,EAASE,SACZrC,KAAK4B,MAAMC,EAAKM,EAClB,IAIFA,EAASC,QACNE,MACC,KACEH,EAASE,SAAU,CAAI,IAEzB,KACEF,EAASE,SAAU,EAGnBrC,KAAK4B,MAAMC,EAAKM,EAAS,IAG5BI,OAAMC,IAIL,MADAC,QAAQD,MAAMA,GACRA,CAAK,IAGfxC,KAAKmB,MAAMG,IAAIO,EAAKM,EACtB,CAEA,yBAAOO,CAAsBN,EAAqBrC,GAIhD,SAAS4C,IACP,GAAI5C,aAAM,EAANA,EAAQE,QACV,MAAM2C,OAAOC,OAAO,IAAI3C,MAAM,WAAY,CAAEyB,KAAM,eAEtD,CAEA,OAAOS,EAAQE,MACbQ,IACEH,IACOG,KAETN,IAEE,MADAG,IACMH,CAAK,GAGjB,CAEA,GAAAO,CAAIlB,GACF,OAAO7B,KAAKmB,MAAM4B,IAAIlB,EACxB,CAeA,GAAAR,CACEQ,EACAE,EACAhC,EACAiC,GAEA,IAAKjC,GAAUgC,aAAgBiB,YAC7B,MAAM,IAAI5B,UACR,yGAGJ,MAAM6B,EAAajD,KAAKmB,MAAME,IAAIQ,GAElC,OAAIoB,EACEA,EAAWhD,UAAYgD,EAAWZ,SAEpCrC,KAAK4B,MAAMC,EAAKoB,GACTjD,KAAKqB,IAAIQ,EAAKE,EAAMhC,EAAQiC,IAGjCiB,EAAWZ,QAENY,EAAWb,SAKpBa,EAAWhB,QAAQnC,UAAUC,GAC7BkD,EAAWf,eAAevB,YAAYqB,GAE/BhB,EAAsB0B,mBAC3BO,EAAWb,QACXrC,KAKJC,KAAKkB,KAAKW,EAAKE,EAAMhC,EAAQiC,GACtBhB,EAAsB0B,mBAG3B1C,KAAKmB,MAAME,IAAIQ,GAAMO,QACrBrC,GAEJ,CAQA,OAAO8B,GACL,MAAMqB,EAAclD,KAAKmB,MAAME,IAAIQ,GAC/BqB,IACGA,EAAYb,SACfa,EAAYjB,QAAQzB,QAEtBR,KAAKmB,MAAMb,OAAOuB,GAEtB,CAMA,KAAAsB,GAEE,MAAMC,EAAUpD,KAAKmB,MAAMkC,OAC3B,IAAIC,EAAc,EAClB,IAAK,IAAIR,EAASM,EAAQG,QAAST,EAAOU,KAAMV,EAASM,EAAQG,OAC/DvD,KAAKM,OAAOwC,EAAOW,OACnBH,GAAe,EAEjB,OAAOA,CACT,E,6FCjOFI,eAAeC,EAAMC,GACnB,IACE,IAAIC,EACAC,EAAM,EACNC,EAAI,EACR,MAAMC,EAAS,GACf,IACIC,EADAC,EAAY,EAEhB,EAAG,CACD,MAAMC,EAAiBP,EAAUQ,SAASN,GAK1C,GAJAG,EAAW,IAAI,EAAAI,UAEXR,QAASI,GACbA,EAASK,KAAKH,EAAgB,EAAAI,cAC1BN,EAASO,IACX,MAAM,IAAItE,MAAM+D,EAASQ,KAG3BX,GAAOD,EAAKa,QACZV,EAAOD,GAAKE,EAASnB,OACrBoB,GAAaF,EAAOD,GAAGY,OACvBZ,GAAK,C,OACEF,EAAKe,UAEd,MAAM9B,EAAS,IAAI+B,WAAWX,GAC9B,IAAK,IAAIH,EAAI,EAAGe,EAAS,EAAGf,EAAIC,EAAOW,OAAQZ,IAC7CjB,EAAOxB,IAAI0C,EAAOD,GAAIe,GACtBA,GAAUd,EAAOD,GAAGY,OAEtB,OAAO,KAAOI,KAAKjC,E,CACnB,MAAOkC,GAEP,GAAI,GAAGA,IAAIC,MAAM,0BACf,MAAM,IAAI/E,MACR,4DAGJ,MAAM8E,C,CAEV,CAgDAtB,eAAewB,EAAgBtB,EAAmBuB,GAChD,IACE,IAAItB,EACJ,MAAM,KAAEuB,EAAI,KAAEC,GAASF,EACvB,IAAIG,EAAOF,EAAKG,cACZC,EAAOJ,EAAKK,aAChB,MAAMzB,EAAS,GACT0B,EAAa,GACbC,EAAa,GAEnB,IAAIzB,EAAY,EACZH,EAAI
,EACR,EAAG,CACD,MAAMI,EAAiBP,EAAUQ,SAASkB,EAAOF,EAAKG,eAChDtB,EAAW,IAAI,EAAAI,QAIrB,KAFIR,QAASI,GACbA,EAASK,KAAKH,EAAgB,EAAAI,cAC1BN,EAASO,IACX,MAAM,IAAItE,MAAM+D,EAASQ,KAG3B,MAAMmB,EAAS3B,EAASnB,OACxBkB,EAAOM,KAAKsB,GACZ,IAAIC,EAAMD,EAAOjB,OAEjBe,EAAWpB,KAAKgB,GAChBK,EAAWrB,KAAKkB,GACM,IAAlBxB,EAAOW,QAAgBS,EAAKK,eAE9BzB,EAAO,GAAKA,EAAO,GAAGI,SAASgB,EAAKK,cACpCI,EAAM7B,EAAO,GAAGW,QAElB,MAAMmB,EAAWR,EAIjB,GAHAA,GAAQzB,EAAKa,QACbc,GAAQK,EAEJC,GAAYT,EAAKE,cAAe,CAKlCvB,EAAOD,GAAKC,EAAOD,GAAGK,SACpB,EACAiB,EAAKE,gBAAkBH,EAAKG,cACxBF,EAAKI,aAAeL,EAAKK,aAAe,EACxCJ,EAAKI,aAAe,GAG1BC,EAAWpB,KAAKgB,GAChBK,EAAWrB,KAAKkB,GAChBtB,GAAaF,EAAOD,GAAGY,OACvB,K,CAEFT,GAAaF,EAAOD,GAAGY,OACvBZ,G,OACOF,EAAKe,UAEd,MAAM9B,EAAS,IAAI+B,WAAWX,GAC9B,IAAK,IAAIH,EAAI,EAAGe,EAAS,EAAGf,EAAIC,EAAOW,OAAQZ,IAC7CjB,EAAOxB,IAAI0C,EAAOD,GAAIe,GACtBA,GAAUd,EAAOD,GAAGY,OAItB,MAAO,CAAEiB,OAFM,KAAOb,KAAKjC,GAEV4C,aAAYC,a,CAC7B,MAAOX,GAEP,GAAI,GAAGA,IAAIC,MAAM,0BACf,MAAM,IAAI/E,MACR,4DAGJ,MAAM8E,C,CAEV,C,wBC5Ke,MAAMe,EAKnB,WAAA9E,EAAY,WACV+E,EAAU,KACVC,IAKA,GAAID,EACFhG,KAAKgG,WAAaA,MACb,KAAIC,EAGT,MAAM,IAAI7E,UAAU,6CAFpBpB,KAAKgG,WAAa,IAAI,KAAUC,E,CAIpC,CAEA,qBAAAC,CAAsBC,EAAarB,EAAS,EAAGsB,GAAW,GAExD,MAAMC,EAAO,gBAAiBF,EAAIG,MAAMxB,EAAQA,EAAS,GAAIsB,GAC7D,GACEC,EAAKE,YAAYC,OAAOC,mBACxBJ,EAAKK,SAASF,OAAOG,kBAErB,MAAM,IAAIvF,UAAU,oBAGtB,OAAOiF,EAAKO,UACd,CAEA,SAAAC,GAIE,OAHK7G,KAAK8G,QACR9G,KAAK8G,MAAQ9G,KAAK+G,cAEb/G,KAAK8G,KACd,CAEA,gBAAMC,GACJ,IAAIZ,EAAM,KAAOa,YAAY,SACvBhH,KAAKgG,WAAWiB,KAAKd,EAAK,EAAG,EAAG,GACtC,MAAMe,EAAalH,KAAKkG,sBAAsBC,EAAK,GAAG,GACtD,IAAKe,EACH,MAAO,CAAC,CAAC,EAAG,IAGd,MAAMC,EAAU,IAAIC,MAAMF,EAAa,GACvCC,EAAQ,GAAK,CAAC,EAAG,GAGjB,MAAME,EAAU,GAAQH,EACxB,GAAIG,EAAUb,OAAOC,iBACnB,MAAM,IAAIrF,UAAU,oBAEtB+E,EAAM,KAAOa,YAAYK,SACnBrH,KAAKgG,WAAWiB,KAAKd,EAAK,EAAGkB,EAAS,GAC5C,IAAK,IAAIC,EAAc,EAAGA,EAAcJ,EAAYI,GAAe,EAAG,CACpE,MAAMC,EAAqBvH,KAAKkG,sBAC9BC,EACc,GAAdmB,GAEIE,EAAuBxH,KAAKkG,sBAChCC,EACc,GAAdmB,EAAmB,GAErBH,EAAQG,EAAc,GAAK,CAACC,EAAoBC,E,CAGlD,OAAOL,CACT,CAEA,kBAAMM,GACJ,MAAMN,QAAgBnH,KAAK6G,YAC3B,GAAKM,EAAQxC,OAGb,OAAOwC,EAAQA,EAAQxC,OAAS,EAClC,CAEA,8BAAM+C,CAAyB/C,EAAgBgD,GAC7C,MAAMC,EAAcD,EAAWhD,EAC/B,GAAe,IAAXA,EACF,MAAO,GAET,MAAMwC,QAAgBnH,KAAK6G,YACrBgB,EAAW,GAIXC,EAAU,CAAChG,EAAYiG,KAC3B,MAAMP,EAAuB1F,EA/FL,GAgGlBkG,EAA2BD,EAC7BA,EAjGoB,GAkGpBE,IAEJ,OACET,GAAwBG,GACxBK,EAA2BL,EAEpB,EAGLH,EAAuBG,GACjB,EAGH,CAAC,EAGV,IAAIO,EAAa,EACbC,EAAahB,EAAQxC,OAAS,EAC9ByD,EAAiBC,KAAKC,MAAMnB,EAAQxC,OAAS,GAE7C4D,EAAaT,EACfX,EAAQiB,GACRjB,EAAQiB,EAAiB,IAE3B,KAAsB,IAAfG,GACDA,EAAa,EACfJ,EAAaC,EAAiB,EACrBG,EAAa,IACtBL,EAAaE,EAAiB,GAEhCA,EAAiBC,KAAKG,MAAML,EAAaD,GAAc,GAAKA,EAC5DK,EAAaT,EAAQX,EAAQiB,GAAiBjB,EAAQiB,EAAiB,IAIzEP,EAASvD,KAAK6C,EAAQiB,IACtB,IAAIrE,EAAIqE,EAAiB,EACzB,KAAOrE,EAAIoD,EAAQxC,SACjBkD,EAASvD,KAAK6C,EAAQpD,MAClBoD,EAAQpD,GAzIY,IAyIiB6D,IAFhB7D,GAAK,GAShC,OAHI8D,EAASA,EAASlD,OAAS,GA7IL,GA6IiCiD,GACzDC,EAASvD,KAAK,IAETuD,CACT,EC/Ia,MAAMY,EAInB,WAAAxH,EAAY,WACV+E,EAAU,KACVC,EAAI,cACJyC,EAAa,QACbC,IAOA,GAAI3C,EACFhG,KAAKgG,WAAaA,MACb,KAAIC,EAGT,MAAM,IAAI7E,UAAU,6CAFpBpB,KAAKgG,WAAa,IAAI,KAAUC,E,CAKlC,IAAKyC,IAAkBC,IAAY1C,EACjC,MAAM,IAAI7E,UAAU,mDAGtBpB,KAAK4I,IAAM,IAAI7C,EAAS,CACtBC,WAAY0C,EACZzC,KAAOyC,GAAkBC,IAAW1C,EAAiB,GAAGA,QAAb0C,GAE/C,CAEA,UAAME,GACJ,MAAMC,QAAuB9I,KAAKgG,WAAW6C,OAC7C,OAAOjG,OAAOC,OAAOiG,EAAgB,CACnCvI,WAAYP,KAAK+I,0BACjBC,YAAQC,EACRC,aAASD,GAEb,CAEA,6BAAMF,GAGJ,MAAO,CAAEvB,SAA8BxH,KAAK4I,IAAInB,gBAE1C,KAAElH,SAAeP,KAAKgG,WAAW6C,OAEjC1C,EAAM,KAAOa,YAAY,IAGzB,UAAEmC,SAAoBnJ,KAAKgG,WAAWiB,KAAKd,EAAK,EAAG,EAAG5F,EAAO,GAAK,GACxE,GAAkB,IAAd4I,EACF,MAAM,IAAIjJ,MAAM,cAGlB,OAAOsH,EAD2B
rB,EAAIiD,aAAa,EAErD,CAEA,6BAAMC,CACJC,GACC/B,IACAgC,IAED,IAAIhG,EAAOgG,EACNhG,IACHA,SAAcvD,KAAKgG,WAAW6C,QAAQtI,MAIxC,MAAMiJ,EAAwBjG,EAAOgE,EAcrC,aAZMvH,KAAKgG,WAAWiB,KACpBqC,EACA,EACAE,EACAjC,SAI2B5D,EAC3B2F,EAAYhD,MAAM,EAAGkD,GAIzB,CAEA,UAAMvC,CAAKd,EAAarB,EAAgBH,EAAgBgD,GAEtD,MAAM8B,QAAuBzJ,KAAK4I,IAAIlB,yBACpC/C,EACAgD,GAEI2B,EAAc,KAAOtC,YAAY,OAEvC,IAAI0C,EAAoB5E,EACpBqE,EAAY,EAChB,IACE,IAAIQ,EAAW,EACfA,EAAWF,EAAe9E,OAAS,EACnCgF,GAAY,EACZ,CAEA,MAAMC,QAA2B5J,KAAKqJ,wBACpCC,EACAG,EAAeE,GACfF,EAAeE,EAAW,KAErB,CAAEnC,GAAwBiC,EAAeE,GAC1CE,EACJrC,GAAwBG,EAAW,EAAIA,EAAWH,EAC9CsC,EACJzB,KAAK0B,IACHpC,EAAWhD,EACX6C,EAAuBoC,EAAmBjF,QACxC6C,EACFqC,GAAgB,GAAKA,EAAeD,EAAmBjF,SACzDiF,EAAmBI,KAAK7D,EAAKuD,EAAmBG,EAAcC,GAC9DJ,GAAqBI,EAAYD,EACjCV,GAAaW,EAAYD,E,CAI7B,MAAO,CAAEV,YAAWvD,OAAQO,EAC9B,E,uGCjIK,SAAS8D,EAAa5D,GAC3B,GACEA,EAAKE,YAAYC,OAAOC,mBACxBJ,EAAKK,SAASF,OAAOG,kBAErB,MAAM,IAAIzG,MAAM,oBAElB,OAAOmG,EAAKO,UACd,CAEA,MAAMsD,UAAmBhK,OAelB,SAASiK,EAAiBpK,GAC/B,GAAKA,GAIDA,EAAOE,QAAS,CAClB,GAA4B,oBAAjBmK,aACT,MAAM,IAAIA,aAAa,UAAW,cAC7B,CACL,MAAMpF,EAAI,IAAIkF,EAAW,WAEzB,MADAlF,EAAErD,KAAO,cACHqD,CACR,CACF,CACF,CAoBO,SAASqF,EAAerG,EAAiBsG,GAC9C,MAAMC,EAAwB,GAC9B,IAAIC,EAA0B,KAE9B,OAAsB,IAAlBxG,EAAOW,OACFX,GAGTA,EAAOyG,MAAK,SAAUC,EAAIC,GACxB,MAAMC,EAAMF,EAAGtF,KAAKG,cAAgBoF,EAAGvF,KAAKG,cAC5C,OAAe,IAARqF,EAAYA,EAAMF,EAAGtF,KAAKK,aAAekF,EAAGvF,KAAKK,YAC1D,IAEAzB,EAAO6G,SAAQ1F,IApBV,IAAwB2F,EAAeC,IAqBrCT,GAAUnF,EAAME,KAAK2F,UAAUV,GAAU,KAC1B,OAAdE,GACFD,EAAajG,KAAKa,GAClBqF,EAAYrF,IAxBW2F,EA0BJN,GA1BmBO,EA0BR5F,GAxB3BC,KAAKG,cAAgBuF,EAAOzF,KAAKE,cAAgB,MACxDwF,EAAO1F,KAAKE,cAAgBuF,EAAO1F,KAAKG,cAAgB,IAwB9CJ,EAAME,KAAK2F,UAAUR,EAAUnF,MAAQ,IACzCmF,EAAUnF,KAAOF,EAAME,OAGzBkF,EAAajG,KAAKa,GAClBqF,EAAYrF,IAGlB,IAGKoF,EACT,C,wBC7Fe,MAAMU,EAGnB,WAAAhK,CAAYsE,EAAuBE,GACjCzF,KAAKuF,cAAgBA,EACrBvF,KAAKyF,aAAeA,CACtB,CAEA,QAAAyF,GACE,MAAO,GAAGlL,KAAKuF,iBAAiBvF,KAAKyF,cACvC,CAEA,SAAAuF,CAAUG,GACR,OACEnL,KAAKuF,cAAgB4F,EAAE5F,eAAiBvF,KAAKyF,aAAe0F,EAAE1F,YAElE,EAEK,SAAS2F,EAAUC,EAAevG,EAAS,EAAGwG,GAAY,GAC/D,GAAIA,EACF,MAAM,IAAIpL,MAAM,mDAGlB,OAAO,IAAI+K,EACY,cAArBI,EAAMvG,EAAS,GACQ,WAArBuG,EAAMvG,EAAS,GACM,SAArBuG,EAAMvG,EAAS,GACM,MAArBuG,EAAMvG,EAAS,GACM,IAArBuG,EAAMvG,EAAS,GACfuG,EAAMvG,EAAS,GAChBuG,EAAMvG,EAAS,IAAO,EAAKuG,EAAMvG,GAEtC,CC9Be,MAAMyG,EAMnB,WAAAtK,CACEmE,EACAC,EACAmG,EACAC,OAAcxC,GAEdjJ,KAAKoF,KAAOA,EACZpF,KAAKqF,KAAOA,EACZrF,KAAKwL,IAAMA,EACXxL,KAAK0L,aAAeD,CACtB,CAEA,cAAAE,GAEE,MAAO,GAAG3L,KAAKoF,SAASpF,KAAKqF,aAC3BrF,KAAKwL,oBACUxL,KAAKyL,gBACxB,CAEA,QAAAP,GACE,OAAOlL,KAAK2L,gBACd,CAEA,SAAAX,CAAUG,GACR,OACEnL,KAAKoF,KAAK4F,UAAUG,EAAE/F,OACtBpF,KAAKqF,KAAK2F,UAAUG,EAAE9F,OACtBrF,KAAKwL,IAAML,EAAEK,GAEjB,CAEA,WAAAC,GACE,YAA0BxC,IAAtBjJ,KAAK0L,aACA1L,KAAK0L,aAEP1L,KAAKqF,KAAKE,cAAgB,MAAYvF,KAAKoF,KAAKG,aACzD,ECzBa,MAAeqG,EAK5B,WAAA3K,EAAY,WACV+E,EAAU,cACV6F,EAAiBC,GAAcA,IAK/B9L,KAAKgG,WAAaA,EAClBhG,KAAK+L,aAAeF,CACtB,CAMO,iBAAMG,CAAYC,EAAgB,CAAC,GACxC,MAAQC,QAASC,KAAaC,SAAepM,KAAKqM,MAAMJ,GACxD,OAAOG,CACT,CASA,cAAAE,CACEC,EACAC,GAEA,OAAID,EACKA,EAAWvB,UAAUwB,GAAiB,EACzCA,EACAD,EAEGC,CAEX,CAEA,WAAMH,CAAMJ,EAAgB,CAAC,GAO3B,OANKjM,KAAKyM,SACRzM,KAAKyM,OAASzM,KAAK0M,OAAOT,GAAM1J,OAAOyC,IAErC,MADAhF,KAAKyM,YAASxD,EACRjE,CAAC,KAGJhF,KAAKyM,MACd,CAEA,eAAME,CAAUC,EAAeX,EAAgB,CAAC,G,MAE9C,SAA2B,QAAlB,SADSjM,KAAKqM,MAAMJ,IAChBC,QAAQU,UAAM,eAAEC,SAC/B,EClDa,MAAMC,UAAmBlB,EACtC,eAAMmB,CAAUC,EAAiBf,EAAgB,CAAC,GAChD,MAAMgB,QAAkBjN,KAAKqM,MAAMJ,GAC7BiB,EAAQD,EAAUE,YAAYH,GACpC,QAAc/D,IAAViE,EACF,OAAQ,EAGV,IADYD,EAAUf,QAAQgB,GAE5B,OAAQ,EAEV,MAAM,MAAEE,GAAUH,EAAUf,QAAQgB,GACpC,OAAIE,EACKA,EAAML,WAEP,CACV,CAGA,YAAML,
CAAOT,EAAgB,CAAC,GAC5B,MAAM9F,QAAYnG,KAAKgG,WAAWqH,SAASpB,GACrCZ,QAAc,IAAA1H,OAAMwC,GAI1B,GAHAgE,EAAiB8B,EAAKlM,QA1CR,WA6CVsL,EAAMjC,aAAa,GACrB,MAAM,IAAIlJ,MAAM,kBAKlB,MAAMoN,EAAWjC,EAAMkC,YAAY,GAC7BC,EAAcnC,EAAMkC,YAAY,GAChCE,EACU,MAAdD,EAAwB,uBAAyB,iBAM7CE,EALqC,CACzC,EAAG,UACH,EAAG,MACH,EAAG,OAEmC,GAAdF,GAC1B,IAAKE,EACH,MAAM,IAAIxN,MAAM,qCAAqCsN,KAEvD,MAAMG,EAAgB,CACpBC,IAAKvC,EAAMkC,YAAY,IACvBM,MAAOxC,EAAMkC,YAAY,IACzBO,IAAKzC,EAAMkC,YAAY,KAEnBQ,EAAY1C,EAAMkC,YAAY,IAI9BS,EAAWD,EAAYE,OAAOC,aAAaH,GAAa,KACxDI,EAAY9C,EAAMkC,YAAY,IAG9Ba,EAAoB/C,EAAMkC,YAAY,KACtC,YAAEJ,EAAW,YAAEkB,GAAgBrO,KAAKsO,gBACxCjD,EAAM/E,MAAM,GAAI,GAAK8H,IAIvB,IACIG,EADAC,EAAa,GAAKJ,EAiDtB,MAAO,CACLlC,QAhDc,IAAI9E,MAAMkG,GAAUpM,KAAK,GAAGuN,KAAI,KAE9C,MAAMC,EAAWrD,EAAMkC,YAAYiB,GACnCA,GAAc,EACd,MAAM3B,EAAoC,CAAC,EAC3C,IAAIO,EACJ,IAAK,IAAIuB,EAAI,EAAGA,EAAID,EAAUC,GAAK,EAAG,CACpC,MAAMnD,EAAMH,EAAMjC,aAAaoF,GAE/B,GADAA,GAAc,EACVhD,EAAMoD,MACR,MAAM,IAAI1O,MACR,8DAEG,GAAY0O,QAARpD,EAA0B,CACnC,MAAMqD,EAAaxD,EAAMkC,YAAYiB,GACrCA,GAAc,EACK,IAAfK,IACFzB,EAAQpN,KAAK8O,eAAezD,EAAOmD,IAErCA,GAAc,GAAKK,CACrB,KAAO,CACL,MAAMA,EAAaxD,EAAMkC,YAAYiB,GACrCA,GAAc,EACd,MAAMxK,EAAS,IAAIoD,MAAMyH,GACzB,IAAK,IAAIE,EAAI,EAAGA,EAAIF,EAAYE,GAAK,EAAG,CACtC,MAAMC,EAAI5D,EAAUC,EAAOmD,GACrBS,EAAI7D,EAAUC,EAAOmD,EAAa,GACxCA,GAAc,GACdD,EAAgBvO,KAAKsM,eAAeiC,EAAeS,GACnDhL,EAAO+K,GAAK,IAAIxD,EAAMyD,EAAGC,EAAGzD,EAC9B,CACAqB,EAASrB,GAAOxH,CAClB,CACF,CAGA,MAAMkL,EAAc7D,EAAMkC,YAAYiB,GACtCA,GAAc,EACd,MAAMW,EAAc,IAAI/H,MAAM8H,GAC9B,IAAK,IAAIH,EAAI,EAAGA,EAAIG,EAAaH,GAAK,EACpCI,EAAYJ,GAAK3D,EAAUC,EAAOmD,GAClCA,GAAc,EACdD,EAAgBvO,KAAKsM,eAAeiC,EAAeY,EAAYJ,IAEjE,MAAO,CAAElC,WAAUsC,cAAa/B,QAAO,IAKvCY,WACAY,aAhEmB,MAiEnBQ,aAhEmB,UAiEnBjB,YACAI,gBACAZ,gBACAF,iBACAC,SACAW,cACAlB,cACAkC,aAAc,MAElB,CAEA,cAAAP,CAAezD,EAAevG,GAO5B,MAAO,CAAEiI,UANS9C,EAChB,gBACEoB,EAAM/E,MAAMxB,EAAS,GAAIA,EAAS,KAClC,IAIN,CAEA,eAAAwJ,CAAgBgB,GACd,IAAIC,EAAY,EACZC,EAAgB,EACpB,MAAMnB,EAAwB,GACxBlB,EAAsC,CAAC,EAC7C,IAAK,IAAIpJ,EAAI,EAAGA,EAAIuL,EAAW3K,OAAQZ,GAAK,EAC1C,IAAKuL,EAAWvL,GAAI,CAClB,GAAIyL,EAAgBzL,EAAG,CACrB,IAAIiJ,EAAUsC,EAAWpE,SAAS,OAAQsE,EAAezL,GACzDiJ,EAAUhN,KAAK+L,aAAaiB,GAC5BqB,EAAYkB,GAAavC,EACzBG,EAAYH,GAAWuC,CACzB,CACAC,EAAgBzL,EAAI,EACpBwL,GAAa,CACf,CAEF,MAAO,CAAEpC,cAAakB,cACxB,CAEA,oBAAMoB,CACJzC,EACAjD,EACA2F,EACAzD,EAAgB,CAAC,GAEblC,EAAM,IACRA,EAAM,GAGR,MAAMkD,QAAkBjN,KAAKqM,MAAMJ,GAC7BiB,EAAQD,EAAUE,YAAYH,GACpC,QAAc/D,IAAViE,EACF,MAAO,GAET,MAAMyC,EAAK1C,EAAUf,QAAQgB,GAC7B,IAAKyC,EACH,MAAO,IAGSA,EAAGR,YAAYxK,OAC7BgL,EAAGR,YACDpF,GAvMa,IAuMY4F,EAAGR,YAAYxK,OACpCgL,EAAGR,YAAYxK,OAAS,EACxBoF,GAzMS,IA2Mf,IAAIkB,EAAc,EAAG,KAEvBxI,QAAQmN,KAAK,4CAKf,MAAMC,GA5MQC,EA4MmB/F,EA5MN+D,EA4MW4B,EAzMjC,CACL,CAAC,EAAG,GACJ,CAAC,IAJHI,GAAO,IAIQ,IAAK,IAHpBhC,GAAO,IAGyB,KAC9B,CAAC,GAAKgC,GAAO,IAAK,GAAKhC,GAAO,KAC9B,CAAC,IAAMgC,GAAO,IAAK,IAAMhC,GAAO,KAChC,CAAC,KAAOgC,GAAO,IAAK,KAAOhC,GAAO,KAClC,CAAC,MAAQgC,GAAO,IAAK,MAAQhC,GAAO,OATxC,IAAkBgC,EAAahC,EA6M3B,MAAM9J,EAAkB,GAGxB,IAAK,MAAO6J,EAAOC,KAAQ+B,EACzB,IAAK,IAAIrE,EAAMqC,EAAOrC,GAAOsC,EAAKtC,IAChC,GAAImE,EAAG9C,SAASrB,GACd,IAAK,MAAMuE,KAAKJ,EAAG9C,SAASrB,GAC1BxH,EAAOM,KAAK,IAAIiH,EAAMwE,EAAE3K,KAAM2K,EAAE1K,KAAMmG,IAQ9C,MAAMwE,EAAQL,EAAGR,YAAYxK,OAC7B,IAAI2F,EAAS,KACb,MAAM2F,EAAS5H,KAAK0B,IAAIA,GAAO,GAAIiG,EAAQ,GACrCE,EAAS7H,KAAK0B,IAAI2F,GAAO,GAAIM,EAAQ,GAC3C,IAAK,IAAIjM,EAAIkM,EAAQlM,GAAKmM,IAAUnM,EAAG,CACrC,MAAMoM,EAAKR,EAAGR,YAAYpL,GACtBoM,KACG7F,GAAU6F,EAAGnF,UAAUV,GAAU,KACpCA,EAAS6F,EAGf,CAEA,OAAO9F,EAAerG,EAAQsG,EAChC,ECzOF,SAAS8F,EAAOC,EAAaC,GAC3B,OAAOjI,KAAKC,MAAM+H,EAAM,GAAKC,EAC/B,CAEe,MAAMC,UAAY3E,EAI/B,WAAA3K,CAAYuP,GACVC
,MAAMD,GACNxQ,KAAK4O,aAAe,EACpB5O,KAAK0Q,MAAQ,EACb1Q,KAAK2Q,SAAW,CAClB,CACA,eAAM5D,CAAUC,EAAiBf,EAAgB,CAAC,GAChD,MAAMgB,QAAkBjN,KAAKqM,MAAMJ,GAC7BiB,EAAQD,EAAUE,YAAYH,GACpC,QAAc/D,IAAViE,EACF,OAAQ,EAGV,IADYD,EAAUf,QAAQgB,GAE5B,OAAQ,EAEV,MAAM,MAAEE,GAAUH,EAAUf,QAAQgB,GACpC,OAAIE,EACKA,EAAML,WAEP,CACV,CAEA,QAAA6D,GACE,MAAM,IAAI1Q,MAAM,sCAClB,CAEA,YAAA2Q,CAAaxF,EAAevG,GAC1B,MAAM0I,EAAcnC,EAAMkC,YAAYzI,GAChC2I,EACU,MAAdD,EAAwB,uBAAyB,iBAC7CE,EAAS,CAAE,EAAG,UAAW,EAAG,MAAO,EAAG,OAAsB,GAAdF,GACpD,IAAKE,EACH,MAAM,IAAIxN,MAAM,qCAAqCsN,KAEvD,MAAMG,EAAgB,CACpBC,IAAKvC,EAAMkC,YAAYzI,EAAS,GAChC+I,MAAOxC,EAAMkC,YAAYzI,EAAS,GAClCgJ,IAAKzC,EAAMkC,YAAYzI,EAAS,KAE5BiJ,EAAY1C,EAAMkC,YAAYzI,EAAS,IACvCkJ,EAAWD,EAAYE,OAAOC,aAAaH,GAAa,KACxDI,EAAY9C,EAAMkC,YAAYzI,EAAS,IACvCsJ,EAAoB/C,EAAMkC,YAAYzI,EAAS,KAE/C,YAAEuJ,EAAW,YAAElB,GAAgBnN,KAAKsO,gBACxCjD,EAAM/E,MAAMxB,EAAS,GAAIA,EAAS,GAAKsJ,IAGzC,MAAO,CACLC,cACAlB,cACAgB,YACAH,WACAL,gBACAD,SACAD,iBAEJ,CAEA,eAAAa,CAAgBgB,GACd,IAAIC,EAAY,EACZC,EAAgB,EACpB,MAAMnB,EAAc,GACdlB,EAAsC,CAAC,EAC7C,IAAK,IAAIpJ,EAAI,EAAGA,EAAIuL,EAAW3K,OAAQZ,GAAK,EAC1C,IAAKuL,EAAWvL,GAAI,CAClB,GAAIyL,EAAgBzL,EAAG,CACrB,IAAIiJ,EAAUsC,EAAWpE,SAAS,OAAQsE,EAAezL,GACzDiJ,EAAUhN,KAAK+L,aAAaiB,GAC5BqB,EAAYkB,GAAavC,EACzBG,EAAYH,GAAWuC,CACzB,CACAC,EAAgBzL,EAAI,EACpBwL,GAAa,CACf,CAEF,MAAO,CAAEpC,cAAakB,cACxB,CAIA,YAAM3B,CAAOT,EAAgB,CAAC,GAC5B,MAAMZ,QAAc,IAAA1H,aAAY3D,KAAKgG,WAAWqH,SAASpB,IAGzD,IAAI6E,EACJ,GArGe,WAqGXzF,EAAMjC,aAAa,GACrB0H,EAAa,MACR,IAtGQ,WAsGJzF,EAAMjC,aAAa,GAG5B,MAAM,IAAIlJ,MAAM,kBAFhB4Q,EAAa,CAIf,CAEA9Q,KAAK2Q,SAAWtF,EAAMkC,YAAY,GAClCvN,KAAK0Q,MAAQrF,EAAMkC,YAAY,GAC/BvN,KAAK4O,eAAiB,GAAyB,GAAlB5O,KAAK0Q,MAAQ,IAAW,GAAK,EAC1D,MAAMtB,EAAe,IAAMpP,KAAK2Q,SAAwB,EAAb3Q,KAAK0Q,OAC1CK,EAAY1F,EAAMkC,YAAY,IAC9ByD,EACJD,GAAaA,GAAa,GACtB/Q,KAAK6Q,aAAaxF,EAAO,IACzB,CACEgD,YAAa,GACblB,YAAa,CAAC,EACda,SAAU,KACVL,cAAe,CAAEC,IAAK,EAAGC,MAAO,EAAGC,IAAK,GACxCL,eAAgB,uBAChBC,OAAQ,WAEVJ,EAAWjC,EAAMkC,YAAY,GAAKwD,GAGxC,IAAIxC,EACAC,EAAa,GAAKuC,EAAY,EAClC,MAAM7E,EAAU,IAAI9E,MAAMkG,GAAUpM,KAAK,GAAGuN,KAAI,KAE9C,MAAMC,EAAWrD,EAAMkC,YAAYiB,GACnCA,GAAc,EACd,MAAM3B,EAAoC,CAAC,EAC3C,IAAIO,EACJ,IAAK,IAAIuB,EAAI,EAAGA,EAAID,EAAUC,GAAK,EAAG,CACpC,MAAMnD,EAAMH,EAAMjC,aAAaoF,GAC/B,GAAIhD,EAAMxL,KAAK4O,aAGbxB,EAAQpN,KAAK8O,eAAezD,EAAOmD,EAAa,GAChDA,GAAc,OACT,CACL,MAAMyC,EAAU7F,EAAUC,EAAOmD,EAAa,GAC9CD,EAAgBvO,KAAKsM,eAAeiC,EAAe0C,GACnD,MAAMpC,EAAaxD,EAAMkC,YAAYiB,EAAa,IAClDA,GAAc,GACd,MAAMxK,EAAS,IAAIoD,MAAMyH,GACzB,IAAK,IAAIE,EAAI,EAAGA,EAAIF,EAAYE,GAAK,EAAG,CACtC,MAAMC,EAAI5D,EAAUC,EAAOmD,GACrBS,EAAI7D,EAAUC,EAAOmD,EAAa,GACxCA,GAAc,GAEdxK,EAAO+K,GAAK,IAAIxD,EAAMyD,EAAGC,EAAGzD,EAC9B,CACAqB,EAASrB,GAAOxH,CAClB,CACF,CAEA,MAAO,CAAE6I,WAAUO,QAAO,IAG5B,MAAO,IACF4D,EACHE,KAAK,EACL5D,WACA+B,aAAc,MACdd,gBACAuC,aACA5E,UACAwE,MAAO1Q,KAAK0Q,MACZ9B,aAAc5O,KAAK4O,aACnBQ,eAEJ,CAEA,cAAAN,CAAezD,EAAevG,GAO5B,MAAO,CAAEiI,UANS9C,EAChB,gBACEoB,EAAM/E,MAAMxB,EAAS,GAAIA,EAAS,KAClC,IAIN,CAEA,oBAAM2K,CACJzC,EACAjD,EACA2F,EACAzD,EAAgB,CAAC,GAEblC,EAAM,IACRA,EAAM,GAGR,MAAMkD,QAAkBjN,KAAKqM,MAAMJ,GAC7BiB,EAAQD,EAAUE,YAAYH,GACpC,QAAc/D,IAAViE,EACF,MAAO,GAET,MAAMyC,EAAK1C,EAAUf,QAAQgB,GAC7B,IAAKyC,EACH,MAAO,GAKT,MAAME,EAAkB7P,KAAKmR,SAASpH,EAAK2F,GACrC1L,EAAkB,GAGxB,IAAK,MAAO6J,EAAOC,KAAQ+B,EACzB,IAAK,IAAIrE,EAAMqC,EAAOrC,GAAOsC,EAAKtC,IAChC,GAAImE,EAAG9C,SAASrB,GACd,IAAK,MAAMuE,KAAKJ,EAAG9C,SAASrB,GAC1BxH,EAAOM,KAAK,IAAIiH,EAAMwE,EAAE3K,KAAM2K,EAAE1K,KAAMmG,IAM9C,OAAOnB,EAAerG,EAAQ,IAAIiH,EAAc,EAAG,GACrD,CAKA,QAAAkG,CAASrB,EAAahC,IACpBgC,GAAO,GACG,IACRA,EAAM,GAEJhC,EAAM,GAAK,KACbA,EAAM,GAAK,IAEbA,GAAO,EACP,IAAIsD,EAAI,EACJC,EAAI,EACJC,EAAItR,KAA
K2Q,SAAwB,EAAb3Q,KAAK0Q,MAC7B,MAAMa,EAAO,GACb,KAAOH,GAAKpR,KAAK0Q,MAAOY,GAAK,EAAGD,GAAY,EA/OjC,IA+OwC,EAAJD,GAAQA,GAAK,EAAG,CAC7D,MAAMjG,EAAIkG,EAAIjB,EAAON,EAAKwB,GACpBtM,EAAIqM,EAAIjB,EAAOtC,EAAKwD,GAC1B,GAAItM,EAAImG,EAAIoG,EAAK5M,OAAS3E,KAAK4O,aAC7B,MAAM,IAAI1O,MACR,SAAS4P,KAAOhC,oDAAsD9N,KAAK2Q,mBAAmB3Q,KAAK0Q,iEAGvGa,EAAKjN,KAAK,CAAC6G,EAAGnG,GAChB,CACA,OAAOuM,CACT,EC1PF,MAAMC,EACmB,oBAAhBC,YAA8B,IAAIA,YAAY,aAAUxI,EAcjE,SAASyI,EAAQC,GACf,OAAO,IAAIC,SAAQC,GAAWC,WAAWD,EAASF,IACpD,CACe,MAAMI,EA8BnB,WAAA9Q,EAAY,KACVgF,EAAI,WACJD,EAAU,IACVgM,EAAG,QACHC,EAAO,OACPC,EAAM,cACNC,EAAa,QACbC,EAAO,OACPC,EAAM,cACNC,EAAa,UACbC,EAAY,IAAG,cACf1G,EAAgBC,GAAKA,EAAC,eACtB0G,EAAiB,UAejB,GAAIxM,EACFhG,KAAKgG,WAAaA,OACb,GAAIC,EACTjG,KAAKgG,WAAa,IAAI,KAAUC,OAC3B,KAAI+L,EAGT,MAAM,IAAI5Q,UAAU,0CAFpBpB,KAAKgG,WAAa,IAAI,KAAWgM,EAGnC,CAEA,GAAIG,EACFnS,KAAK8G,MAAQ,IAAI,EAAI,CACnBd,WAAYmM,EACZtG,uBAEG,GAAIyG,EACTtS,KAAK8G,MAAQ,IAAIyJ,EAAI,CACnBvK,WAAYsM,EACZzG,uBAEG,GAAIoG,EACTjS,KAAK8G,MAAQ,IAAI,EAAI,CACnBd,WAAY,IAAI,KAAUiM,GAC1BpG,uBAEG,GAAIuG,EACTpS,KAAK8G,MAAQ,IAAIyJ,EAAI,CACnBvK,WAAY,IAAI,KAAUoM,GAC1BvG,uBAEG,GAAI5F,EACTjG,KAAK8G,MAAQ,IAAI,EAAI,CACnBd,WAAY,IAAI,KAAU,GAAGC,SAC7B4F,uBAEG,GAAIwG,EACTrS,KAAK8G,MAAQ,IAAIyJ,EAAI,CACnBvK,WAAY,IAAI,KAAWqM,UAExB,GAAIH,EACTlS,KAAK8G,MAAQ,IAAI,EAAI,CACnBd,WAAY,IAAI,KAAWkM,SAExB,KAAIF,EAKT,MAAM,IAAI5Q,UACR,sFALFpB,KAAK8G,MAAQ,IAAI,EAAI,CACnBd,WAAY,IAAI,KAAW,GAAGgM,UAMlC,CAEAhS,KAAK+L,aAAeF,EACpB7L,KAAKuS,UAAYA,EACjBvS,KAAKyS,WAAa,IAAIzR,EAAA0R,EAAwC,CAC5DvR,MAAO,IAAI,IAAJ,CAAQ,CAAEwR,QAAStK,KAAKC,MAAMkK,EAAiB,SACtDtR,KAAM,CAACsP,EAAazQ,IAClBC,KAAK4S,UAAUpC,EAAM,CAAEzQ,YAE7B,CAeA,cAAM8S,CACJ7F,EACAsE,EACAtM,EACAiH,G,MAEA,IAAIlM,EAEAa,EADAkS,EAAmB,CAAC,EAGJ,mBAAT7G,EACTrL,EAAWqL,GAEX6G,EAAU7G,EACVrL,EAAWqL,EAAK8G,aAChBhT,EAASkM,EAAKlM,QAGhB,MAAMiT,QAAiBhT,KAAK8G,MAAMkF,YAAY8G,GAC9C3I,EAAiBpK,GACjB,MAAM8N,EAAQyD,QAAAA,EAAK,EACbxD,EAAM9I,QAAAA,EAAKgO,EAAS5D,aAC1B,KAAMvB,GAASC,GACb,MAAM,IAAI1M,UACR,8EAGJ,GAAIyM,IAAUC,EACZ,OAGF,MAAM9J,QAAehE,KAAK8G,MAAM2I,eAAezC,EAASa,EAAOC,EAAKgF,GACpE3I,EAAiBpK,GAGjB,IAAIkT,EAAOC,KAAKC,MAChB,IAAK,MAAMpD,KAAK/L,EAAQ,CACtB,IAAIoP,EACJ,MAAM,OAAExN,EAAM,WAAEF,EAAU,WAAEC,SAAqB3F,KAAKyS,WAAWpR,IAC/D0O,EAAE7E,WACF6E,EACAhQ,GAGFoK,EAAiBpK,GACjB,IAAIsT,EAAa,EACbvP,EAAM,EACV,KAAOuP,EAAazN,EAAOjB,QAAQ,CACjC,MAAMmH,EAAIlG,EAAO0N,QAAQ,KAAMD,GAC/B,IAAW,IAAPvH,EACF,MAEF,MAAMX,EAAIvF,EAAOU,MAAM+M,EAAYvH,GAC7ByH,EAAyB,QAAlB,EAAA/B,aAAO,EAAPA,EAASgC,OAAOrI,UAAE,QAAIA,EAAED,WAGrC,GAAIvF,EAAY,CACd,KAAO0N,EAAatD,EAAE3K,KAAKK,cAAgBE,EAAW7B,OACtDA,GACF,CAGA,MAAM,gBAAE2P,EAAe,SAAEC,GAAa1T,KAAK2T,UACzCX,EACAhG,EACAa,EACAC,EACAyF,GAKF,QAC8BtK,IAA5BmK,QACoBnK,IAApBwK,GACAL,EAA0BK,EAE1B,MAAM,IAAIvT,MACR,yCAAyCkT,OAA6BK,2CAK1E,GAFAL,EAA0BK,EAEtBC,EACF9S,EACE2S,EAAKK,OASc,IAAnBlO,EAAW5B,IACRuP,EAAa1N,EAAW7B,IACzBiM,EAAE3K,KAAKK,aACP,QAEC,QAAwBwD,IAApBwK,GAAiCA,GAAmB3F,EAI7D,OAIE9N,KAAKuS,WAAaU,EAAOC,KAAKC,MAAQnT,KAAKuS,YAC7CU,EAAOC,KAAKC,MACZhJ,EAAiBpK,SACX2R,EAAQ,IAEhB2B,EAAavH,EAAI,CACnB,CACF,CACF,CAEA,iBAAME,CAAYC,EAAgB,CAAC,GACjC,OAAOjM,KAAK8G,MAAMkF,YAAYC,EAChC,CAMA,qBAAM4H,CAAgB5H,EAAgB,CAAC,GACrC,MAAM,cAAEsC,EAAa,SAAEP,EAAQ,aAAEqB,SACzBrP,KAAKgM,YAAYC,GACzB9B,EAAiB8B,EAAKlM,QAGtB,MAAM+T,IAAYvF,aAAa,EAAbA,EAAehJ,gBAAiB,GAAK8J,EAIjDlJ,QAAYnG,KAAK+T,YAAY,EAAGD,EAAU7H,GAC1CZ,QAAc,IAAA1H,OAAMwC,GAG1B,GAAI6H,EAAU,CAEZ,IAAIgG,GAAe,EACnB,MAAMC,EAAc,KAAKC,WAAW,GAC9BC,EAAWnG,EAASkG,WAAW,GACrC,IAAK,IAAInQ,EAAI,EAAGA,EAAIsH,EAAM1G,SACpBZ,IAAMiQ,EAAc,GAAK3I,EAAMtH,KAAOoQ,GADVpQ,GAAK,EAIjCsH,EAAMtH,KAAOkQ,IACfD,EAAcjQ,GAGlB,OAAOsH,EAAM/E,MAAM,EAAG0N,EAAc,EACtC,CACA,OAAO3I,CACT,C
AQA,eAAM+I,CAAUnI,EAAgB,CAAC,GAE/B,aADoBjM,KAAK6T,gBAAgB5H,IAC5Bf,SAAS,OACxB,CAMA,+BAAMmJ,CAA0BpI,EAAgB,CAAC,GAE/C,aADuBjM,KAAKgM,YAAYC,IACxBoC,WAClB,CAiBA,SAAAsF,CACEX,EACAsB,EACAC,EACAC,EACAjB,GAEA,MAAM,cAAE5F,EAAa,SAAEK,EAAQ,eAAEP,EAAc,OAAEC,GAAWsF,EAE5D,GAAIhF,GAAYuF,EAAKkB,WAAWzG,GAC9B,MAAO,CAAE0F,UAAU,GAIrB,IAAI,IAAE9F,EAAG,MAAEC,EAAK,IAAEC,GAAQH,EACrBC,IACHA,EAAM,GAEHC,IACHA,EAAQ,GAELC,IACHA,EAAM,GAEO,QAAXJ,IACFI,EAAM,GAER,MAAM4G,EAAYrM,KAAKqH,IAAI9B,EAAKC,EAAOC,GAMvC,IAAI6G,EAAsB,EACtBC,EAAqB,EACrBC,EAAS,GACTpB,GAAkB,IACtB,IAAK,IAAI1P,EAAI,EAAGA,EAAIwP,EAAK5O,OAAS,EAAGZ,GAAK,EACxC,GAAgB,OAAZwP,EAAKxP,IAAeA,IAAMwP,EAAK5O,OAAQ,CACzC,GAAIgQ,IAAwB/G,GAC1B,GACE5N,KAAK+L,aAAawH,EAAKjN,MAAMsO,EAAoB7Q,MACjDuQ,EAEA,MAAO,CAAEZ,UAAU,QAEhB,GAAIiB,IAAwB9G,EAAO,CAMxC,GALA4F,EAAkBqB,SAASvB,EAAKjN,MAAMsO,EAAoB7Q,GAAI,IAEvC,mBAAnB0J,IACFgG,GAAmB,GAEjBA,GAAmBe,EACrB,MAAO,CAAEf,kBAAiBC,UAAU,GAEtC,IAAY,IAAR5F,GAAaA,IAAQD,IAEnB4F,EAAkB,GAAKc,EACzB,MAAO,CAAEd,kBAAiBC,UAAU,EAG1C,MAAO,GAAe,QAAXhG,GAA4C,IAAxBiH,EAC7BE,EAAStB,EAAKjN,MAAMsO,EAAoB7Q,QACnC,GAAI4Q,IAAwB7G,IAGpB,QAAXJ,EACI1N,KAAK+U,WACHtB,EACAoB,EACAtB,EAAKjN,MAAMsO,EAAoB7Q,IAEjC+Q,SAASvB,EAAKjN,MAAMsO,EAAoB7Q,GAAI,MAC7BwQ,EACnB,MAAO,CAAEb,UAAU,GAKvB,GAFAkB,EAAqB7Q,EAAI,EACzB4Q,GAAuB,EACnBA,EAAsBD,EACxB,KAEJ,CAEF,MAAO,CAAEjB,kBAAiBC,UAAU,EACtC,CAEA,UAAAqB,CAAWtB,EAAyBoB,EAAgBG,GAClD,IAAIC,EAAgBxB,EAAkBoB,EAAOlQ,OAM7C,MAAMuQ,EAAQF,EAAKG,SAAS,cAC5B,GAAgB,MAAZH,EAAK,IAAeE,GAajB,GAAIA,EACT,OAAOzB,EAAkB,MAdI,CAC7B,IAAI2B,EAAW,IACf,IAAK,IAAIzG,EAAI,EAAGA,EAAIqG,EAAKrQ,OAAQgK,GAAK,EAAG,CACvC,GAAiB,MAAbyG,GAA6C,SAAzBJ,EAAK1O,MAAMqI,EAAGA,EAAI,GAAe,CACvD,IAAI0G,EAAWL,EAAK1B,QAAQ,IAAK3E,IACf,IAAd0G,IACFA,EAAWL,EAAKrQ,QAElBsQ,EAAgBH,SAASE,EAAK1O,MAAMqI,EAAI,EAAG0G,GAAW,IACtD,KACF,CACAD,EAAWJ,EAAKrG,EAClB,CACF,CAGA,OAAOsG,CACT,CAUA,eAAMlI,CAAUC,EAAiBf,EAAgB,CAAC,GAChD,OAAOjM,KAAK8G,MAAMiG,UAAUC,EAASf,EACvC,CAEA,iBAAM8H,CAAYjQ,EAAavD,EAAc0L,EAAgB,CAAC,GAC5D,MAAMd,EAAI,KAAOmK,MAAM/U,IACjB,UAAE4I,EAAS,OAAEvD,SAAiB5F,KAAKgG,WAAWiB,KAClDkE,EACA,EACA5K,EACAuD,EACAmI,GAGF,OAAOrG,EAAOU,MAAM,EAAG6C,EACzB,CAMA,eAAMyJ,CAAU7C,EAAU9D,EAAgB,CAAC,GAIzC,MAAMlK,QAAa/B,KAAK+T,YACtBhE,EAAE3K,KAAKG,cACPwK,EAAEtE,cACFQ,GAEF,OAAO,QAAgBlK,EAAMgO,EAC/B,E","sources":["../../../node_modules/@gmod/abortable-promise-cache/src/AggregateAbortController.ts","../../../node_modules/@gmod/abortable-promise-cache/src/AggregateStatusReporter.ts","../../../node_modules/@gmod/abortable-promise-cache/src/AbortablePromiseCache.ts","../../../node_modules/@gmod/bgzf-filehandle/src/unzip-pako.ts","../../../node_modules/@gmod/bgzf-filehandle/src/gziIndex.ts","../../../node_modules/@gmod/bgzf-filehandle/src/bgzFilehandle.ts","../../../node_modules/@gmod/tabix/src/util.ts","../../../node_modules/@gmod/tabix/src/virtualOffset.ts","../../../node_modules/@gmod/tabix/src/chunk.ts","../../../node_modules/@gmod/tabix/src/indexFile.ts","../../../node_modules/@gmod/tabix/src/tbi.ts","../../../node_modules/@gmod/tabix/src/csi.ts","../../../node_modules/@gmod/tabix/src/tabixIndexedFile.ts"],"sourcesContent":["class NullSignal {}\n\n/**\n * aggregates a number of abort signals, will only fire the aggregated\n * abort if all of the input signals have been aborted\n */\nexport default class AggregateAbortController {\n  signals = new Set()\n  abortController = new AbortController()\n\n  /**\n   * @param {AbortSignal} [signal] optional AbortSignal to add. 
if falsy,\n   *  will be treated as a null-signal, and this abortcontroller will no\n   *  longer be abortable.\n   */\n  //@ts-ignore\n  addSignal(signal: AbortSignal = new NullSignal()): void {\n    if (this.signal.aborted) {\n      throw new Error('cannot add a signal, already aborted!')\n    }\n\n    // note that a NullSignal will never fire, so if we\n    // have one this thing will never actually abort\n    this.signals.add(signal)\n    if (signal.aborted) {\n      // handle the abort immediately if it is already aborted\n      // for some reason\n      this.handleAborted(signal)\n    } else if (typeof signal.addEventListener === 'function') {\n      signal.addEventListener('abort', () => {\n        this.handleAborted(signal)\n      })\n    }\n  }\n\n  handleAborted(signal: AbortSignal): void {\n    this.signals.delete(signal)\n    if (this.signals.size === 0) {\n      this.abortController.abort()\n    }\n  }\n\n  get signal(): AbortSignal {\n    return this.abortController.signal\n  }\n\n  abort(): void {\n    this.abortController.abort()\n  }\n}\n","export default class AggregateStatusReporter {\n  callbacks = new Set<Function>()\n  currentMessage: unknown\n\n  addCallback(callback: Function = () => {}): void {\n    this.callbacks.add(callback)\n    callback(this.currentMessage)\n  }\n\n  callback(message: unknown) {\n    this.currentMessage = message\n    for (const elt of this.callbacks) {\n      elt(message)\n    }\n  }\n}\n","import AggregateAbortController from './AggregateAbortController'\nimport AggregateStatusReporter from './AggregateStatusReporter'\n\ninterface Cache<U> {\n  delete: (key: string) => void\n  keys: () => Iterator<string>\n  get: (key: string) => U | undefined\n  set: (key: string, value: U) => void\n  has: (key: string) => boolean\n}\ntype FillCallback<T, U> = (\n  data: T,\n  signal?: AbortSignal,\n  statusCallback?: Function,\n) => Promise<U>\n\ninterface Entry<U> {\n  aborter: AggregateAbortController\n  settled: boolean\n  readonly aborted: boolean\n  statusReporter: AggregateStatusReporter\n  promise: Promise<U>\n}\nexport default class AbortablePromiseCache<T, U> {\n  /**\n   * @param {object} args constructor args\n   * @param {Function} args.fill fill callback, will be called with sig `fill(data, signal)`\n   * @param {object} args.cache backing store to use, must implement `get(key)`, `set(key, val)`,\n   *   `delete(key)`, and `keys() -> iterator`\n   */\n\n  private cache: Cache<Entry<U>>\n  private fillCallback: FillCallback<T, U>\n\n  constructor({\n    fill,\n    cache,\n  }: {\n    fill: FillCallback<T, U>\n    cache: Cache<Entry<U>>\n  }) {\n    if (typeof fill !== 'function') {\n      throw new TypeError('must pass a fill function')\n    }\n    if (typeof cache !== 'object') {\n      throw new TypeError('must pass a cache object')\n    }\n    if (\n      typeof cache.get !== 'function' ||\n      typeof cache.set !== 'function' ||\n      typeof cache.delete !== 'function'\n    ) {\n      throw new TypeError(\n        'cache must implement get(key), set(key, val), and and delete(key)',\n      )\n    }\n\n    this.cache = cache\n    this.fillCallback = fill\n  }\n\n  static isAbortException(exception: Error) {\n    return (\n      // DOMException\n      exception.name === 'AbortError' ||\n      // standard-ish non-DOM abort exception\n      //@ts-ignore\n      exception.code === 'ERR_ABORTED' ||\n      // stringified DOMException\n      exception.message === 'AbortError: aborted' ||\n      // stringified standard-ish exception\n      
exception.message === 'Error: aborted'\n    )\n  }\n\n  evict(key: string, entry: Entry<U>) {\n    if (this.cache.get(key) === entry) {\n      this.cache.delete(key)\n    }\n  }\n\n  fill(key: string, data: T, signal?: AbortSignal, statusCallback?: Function) {\n    const aborter = new AggregateAbortController()\n    const statusReporter = new AggregateStatusReporter()\n    statusReporter.addCallback(statusCallback)\n    const newEntry: Entry<U> = {\n      aborter: aborter,\n      promise: this.fillCallback(data, aborter.signal, (message: unknown) => {\n        statusReporter.callback(message)\n      }),\n      settled: false,\n      statusReporter,\n      get aborted() {\n        return this.aborter.signal.aborted\n      },\n    }\n    newEntry.aborter.addSignal(signal)\n\n    // remove the fill from the cache when its abortcontroller fires, if still in there\n    newEntry.aborter.signal.addEventListener('abort', () => {\n      if (!newEntry.settled) {\n        this.evict(key, newEntry)\n      }\n    })\n\n    // chain off the cached promise to record when it settles\n    newEntry.promise\n      .then(\n        () => {\n          newEntry.settled = true\n        },\n        () => {\n          newEntry.settled = true\n\n          // if the fill throws an error (including abort) and is still in the cache, remove it\n          this.evict(key, newEntry)\n        },\n      )\n      .catch(error => {\n        // this will only be reached if there is some kind of\n        // bad bug in this library\n        console.error(error)\n        throw error\n      })\n\n    this.cache.set(key, newEntry)\n  }\n\n  static checkSinglePromise<U>(promise: Promise<U>, signal?: AbortSignal) {\n    // check just this signal for having been aborted, and abort the\n    // promise if it was, regardless of what happened with the cached\n    // response\n    function checkForSingleAbort() {\n      if (signal?.aborted) {\n        throw Object.assign(new Error('aborted'), { code: 'ERR_ABORTED' })\n      }\n    }\n\n    return promise.then(\n      result => {\n        checkForSingleAbort()\n        return result\n      },\n      error => {\n        checkForSingleAbort()\n        throw error\n      },\n    )\n  }\n\n  has(key: string): boolean {\n    return this.cache.has(key)\n  }\n\n  /**\n   * Callback for getting status of the pending async\n   *\n   * @callback statusCallback\n   * @param {any} status, current status string or message object\n   */\n\n  /**\n   * @param {any} key cache key to use for this request\n   * @param {any} data data passed as the first argument to the fill callback\n   * @param {AbortSignal} [signal] optional AbortSignal object that aborts the request\n   * @param {statusCallback} a callback to get the current status of a pending async operation\n   */\n  get(\n    key: string,\n    data: T,\n    signal?: AbortSignal,\n    statusCallback?: Function,\n  ): Promise<U> {\n    if (!signal && data instanceof AbortSignal) {\n      throw new TypeError(\n        'second get argument appears to be an AbortSignal, perhaps you meant to pass `null` for the fill data?',\n      )\n    }\n    const cacheEntry = this.cache.get(key)\n\n    if (cacheEntry) {\n      if (cacheEntry.aborted && !cacheEntry.settled) {\n        // if it's aborted but has not realized it yet, evict it and redispatch\n        this.evict(key, cacheEntry)\n        return this.get(key, data, signal, statusCallback)\n      }\n\n      if (cacheEntry.settled) {\n        // too late to abort, just return it\n        return 
cacheEntry.promise\n      }\n\n      // request is in-flight, add this signal to its list of signals,\n      // or if there is no signal, the aborter will become non-abortable\n      cacheEntry.aborter.addSignal(signal)\n      cacheEntry.statusReporter.addCallback(statusCallback)\n\n      return AbortablePromiseCache.checkSinglePromise(\n        cacheEntry.promise,\n        signal,\n      )\n    }\n\n    // if we got here, it is not in the cache. fill.\n    this.fill(key, data, signal, statusCallback)\n    return AbortablePromiseCache.checkSinglePromise(\n      //see https://www.typescriptlang.org/docs/handbook/2/everyday-types.html#non-null-assertion-operator-postfix-\n\n      this.cache.get(key)!.promise,\n      signal,\n    )\n  }\n\n  /**\n   * delete the given entry from the cache. if it exists and its fill request has\n   * not yet settled, the fill will be signaled to abort.\n   *\n   * @param {any} key\n   */\n  delete(key: string) {\n    const cachedEntry = this.cache.get(key)\n    if (cachedEntry) {\n      if (!cachedEntry.settled) {\n        cachedEntry.aborter.abort()\n      }\n      this.cache.delete(key)\n    }\n  }\n\n  /**\n   * Clear all requests from the cache. Aborts any that have not settled.\n   * @returns {number} count of entries deleted\n   */\n  clear() {\n    // iterate without needing regenerator-runtime\n    const keyIter = this.cache.keys()\n    let deleteCount = 0\n    for (let result = keyIter.next(); !result.done; result = keyIter.next()) {\n      this.delete(result.value)\n      deleteCount += 1\n    }\n    return deleteCount\n  }\n}\n","import { Buffer } from 'buffer'\n//@ts-ignore\nimport { Z_SYNC_FLUSH, Inflate } from 'pako'\n\ninterface VirtualOffset {\n  blockPosition: number\n  dataPosition: number\n}\ninterface Chunk {\n  minv: VirtualOffset\n  maxv: VirtualOffset\n}\n\n// browserify-zlib, which is the zlib shim used by default in webpacked code,\n// does not properly uncompress bgzf chunks that contain more than\n// one bgzf block, so export an unzip function that uses pako directly\n// if we are running in a browser.\nasync function unzip(inputData: Buffer) {\n  try {\n    let strm\n    let pos = 0\n    let i = 0\n    const chunks = []\n    let totalSize = 0\n    let inflator\n    do {\n      const remainingInput = inputData.subarray(pos)\n      inflator = new Inflate()\n      //@ts-ignore\n      ;({ strm } = inflator)\n      inflator.push(remainingInput, Z_SYNC_FLUSH)\n      if (inflator.err) {\n        throw new Error(inflator.msg)\n      }\n\n      pos += strm.next_in\n      chunks[i] = inflator.result as Uint8Array\n      totalSize += chunks[i].length\n      i += 1\n    } while (strm.avail_in)\n\n    const result = new Uint8Array(totalSize)\n    for (let i = 0, offset = 0; i < chunks.length; i++) {\n      result.set(chunks[i], offset)\n      offset += chunks[i].length\n    }\n    return Buffer.from(result)\n  } catch (e) {\n    //cleanup error message\n    if (`${e}`.match(/incorrect header check/)) {\n      throw new Error(\n        'problem decompressing block: incorrect gzip header check',\n      )\n    }\n    throw e\n  }\n}\n\n// similar to pakounzip, except it does extra counting\n// to return the positions of compressed and decompressed\n// data offsets\nasync function unzipChunk(inputData: Buffer) {\n  try {\n    let strm\n    let cpos = 0\n    let dpos = 0\n    const blocks = []\n    const cpositions = []\n    const dpositions = []\n    do {\n      const remainingInput = inputData.slice(cpos)\n      const inflator = new Inflate()\n     
 // @ts-ignore\n      ;({ strm } = inflator)\n      inflator.push(remainingInput, Z_SYNC_FLUSH)\n      if (inflator.err) {\n        throw new Error(inflator.msg)\n      }\n\n      const buffer = Buffer.from(inflator.result)\n      blocks.push(buffer)\n\n      cpositions.push(cpos)\n      dpositions.push(dpos)\n\n      cpos += strm.next_in\n      dpos += buffer.length\n    } while (strm.avail_in)\n\n    const buffer = Buffer.concat(blocks)\n    return { buffer, cpositions, dpositions }\n  } catch (e) {\n    //cleanup error message\n    if (`${e}`.match(/incorrect header check/)) {\n      throw new Error(\n        'problem decompressing block: incorrect gzip header check',\n      )\n    }\n    throw e\n  }\n}\n\n// similar to unzipChunk above but slices (0,minv.dataPosition) and\n// (maxv.dataPosition,end) off\nasync function unzipChunkSlice(inputData: Buffer, chunk: Chunk) {\n  try {\n    let strm\n    const { minv, maxv } = chunk\n    let cpos = minv.blockPosition\n    let dpos = minv.dataPosition\n    const chunks = []\n    const cpositions = []\n    const dpositions = []\n\n    let totalSize = 0\n    let i = 0\n    do {\n      const remainingInput = inputData.subarray(cpos - minv.blockPosition)\n      const inflator = new Inflate()\n      // @ts-ignore\n      ;({ strm } = inflator)\n      inflator.push(remainingInput, Z_SYNC_FLUSH)\n      if (inflator.err) {\n        throw new Error(inflator.msg)\n      }\n\n      const buffer = inflator.result\n      chunks.push(buffer as Uint8Array)\n      let len = buffer.length\n\n      cpositions.push(cpos)\n      dpositions.push(dpos)\n      if (chunks.length === 1 && minv.dataPosition) {\n        // this is the first chunk, trim it\n        chunks[0] = chunks[0].subarray(minv.dataPosition)\n        len = chunks[0].length\n      }\n      const origCpos = cpos\n      cpos += strm.next_in\n      dpos += len\n\n      if (origCpos >= maxv.blockPosition) {\n        // this is the last chunk, trim it and stop decompressing\n        // note if it is the same block is minv it subtracts that already\n        // trimmed part of the slice length\n\n        chunks[i] = chunks[i].subarray(\n          0,\n          maxv.blockPosition === minv.blockPosition\n            ? 
maxv.dataPosition - minv.dataPosition + 1\n            : maxv.dataPosition + 1,\n        )\n\n        cpositions.push(cpos)\n        dpositions.push(dpos)\n        totalSize += chunks[i].length\n        break\n      }\n      totalSize += chunks[i].length\n      i++\n    } while (strm.avail_in)\n\n    const result = new Uint8Array(totalSize)\n    for (let i = 0, offset = 0; i < chunks.length; i++) {\n      result.set(chunks[i], offset)\n      offset += chunks[i].length\n    }\n    const buffer = Buffer.from(result)\n\n    return { buffer, cpositions, dpositions }\n  } catch (e) {\n    //cleanup error message\n    if (`${e}`.match(/incorrect header check/)) {\n      throw new Error(\n        'problem decompressing block: incorrect gzip header check',\n      )\n    }\n    throw e\n  }\n}\n\nfunction nodeUnzip() {\n  throw new Error('nodeUnzip not implemented.')\n}\n\nexport { unzip, unzipChunk, unzipChunkSlice, unzip as pakoUnzip, nodeUnzip }\n","import Long from 'long'\nimport { Buffer } from 'buffer'\nimport { LocalFile, GenericFilehandle } from 'generic-filehandle'\n\n// const COMPRESSED_POSITION = 0\nconst UNCOMPRESSED_POSITION = 1\n\nexport default class GziIndex {\n  filehandle: GenericFilehandle\n\n  index?: any\n\n  constructor({\n    filehandle,\n    path,\n  }: {\n    filehandle?: GenericFilehandle\n    path?: string\n  }) {\n    if (filehandle) {\n      this.filehandle = filehandle\n    } else if (path) {\n      this.filehandle = new LocalFile(path)\n    } else {\n      throw new TypeError('either filehandle or path must be defined')\n    }\n  }\n\n  _readLongWithOverflow(buf: Buffer, offset = 0, unsigned = true) {\n    //@ts-ignore\n    const long = Long.fromBytesLE(buf.slice(offset, offset + 8), unsigned)\n    if (\n      long.greaterThan(Number.MAX_SAFE_INTEGER) ||\n      long.lessThan(Number.MIN_SAFE_INTEGER)\n    ) {\n      throw new TypeError('integer overflow')\n    }\n\n    return long.toNumber()\n  }\n\n  _getIndex() {\n    if (!this.index) {\n      this.index = this._readIndex()\n    }\n    return this.index\n  }\n\n  async _readIndex() {\n    let buf = Buffer.allocUnsafe(8)\n    await this.filehandle.read(buf, 0, 8, 0)\n    const numEntries = this._readLongWithOverflow(buf, 0, true)\n    if (!numEntries) {\n      return [[0, 0]]\n    }\n\n    const entries = new Array(numEntries + 1)\n    entries[0] = [0, 0]\n\n    // TODO rewrite this to make an index-index that stays in memory\n    const bufSize = 8 * 2 * numEntries\n    if (bufSize > Number.MAX_SAFE_INTEGER) {\n      throw new TypeError('integer overflow')\n    }\n    buf = Buffer.allocUnsafe(bufSize)\n    await this.filehandle.read(buf, 0, bufSize, 8)\n    for (let entryNumber = 0; entryNumber < numEntries; entryNumber += 1) {\n      const compressedPosition = this._readLongWithOverflow(\n        buf,\n        entryNumber * 16,\n      )\n      const uncompressedPosition = this._readLongWithOverflow(\n        buf,\n        entryNumber * 16 + 8,\n      )\n      entries[entryNumber + 1] = [compressedPosition, uncompressedPosition]\n    }\n\n    return entries\n  }\n\n  async getLastBlock() {\n    const entries = await this._getIndex()\n    if (!entries.length) {\n      return undefined\n    }\n    return entries[entries.length - 1]\n  }\n\n  async getRelevantBlocksForRead(length: number, position: number) {\n    const endPosition = position + length\n    if (length === 0) {\n      return []\n    }\n    const entries = await this._getIndex()\n    const relevant = []\n\n    // binary search to find the block that the\n    
// read starts in and extend forward from that\n    const compare = (entry: any, nextEntry: any) => {\n      const uncompressedPosition = entry[UNCOMPRESSED_POSITION]\n      const nextUncompressedPosition = nextEntry\n        ? nextEntry[UNCOMPRESSED_POSITION]\n        : Infinity\n      // block overlaps read start\n      if (\n        uncompressedPosition <= position &&\n        nextUncompressedPosition > position\n      ) {\n        return 0\n        // block is before read start\n      }\n      if (uncompressedPosition < position) {\n        return -1\n      }\n      // block is after read start\n      return 1\n    }\n\n    let lowerBound = 0\n    let upperBound = entries.length - 1\n    let searchPosition = Math.floor(entries.length / 2)\n\n    let comparison = compare(\n      entries[searchPosition],\n      entries[searchPosition + 1],\n    )\n    while (comparison !== 0) {\n      if (comparison > 0) {\n        upperBound = searchPosition - 1\n      } else if (comparison < 0) {\n        lowerBound = searchPosition + 1\n      }\n      searchPosition = Math.ceil((upperBound - lowerBound) / 2) + lowerBound\n      comparison = compare(entries[searchPosition], entries[searchPosition + 1])\n    }\n\n    // here's where we read forward\n    relevant.push(entries[searchPosition])\n    let i = searchPosition + 1\n    for (; i < entries.length; i += 1) {\n      relevant.push(entries[i])\n      if (entries[i][UNCOMPRESSED_POSITION] >= endPosition) {\n        break\n      }\n    }\n    if (relevant[relevant.length - 1][UNCOMPRESSED_POSITION] < endPosition) {\n      relevant.push([])\n    }\n    return relevant\n  }\n}\n","import { Buffer } from 'buffer'\nimport { LocalFile, GenericFilehandle } from 'generic-filehandle'\n\n// locals\nimport { unzip } from './unzip'\nimport GziIndex from './gziIndex'\n\nexport default class BgzFilehandle {\n  filehandle: GenericFilehandle\n  gzi: GziIndex\n\n  constructor({\n    filehandle,\n    path,\n    gziFilehandle,\n    gziPath,\n  }: {\n    filehandle?: GenericFilehandle\n    path?: string\n    gziFilehandle?: GenericFilehandle\n    gziPath?: string\n  }) {\n    if (filehandle) {\n      this.filehandle = filehandle\n    } else if (path) {\n      this.filehandle = new LocalFile(path)\n    } else {\n      throw new TypeError('either filehandle or path must be defined')\n    }\n\n    if (!gziFilehandle && !gziPath && !path) {\n      throw new TypeError('either gziFilehandle or gziPath must be defined')\n    }\n\n    this.gzi = new GziIndex({\n      filehandle: gziFilehandle,\n      path: !gziFilehandle && !gziPath && path ? 
gziPath : `${path}.gzi`,\n    })\n  }\n\n  async stat() {\n    const compressedStat = await this.filehandle.stat()\n    return Object.assign(compressedStat, {\n      size: await this.getUncompressedFileSize(),\n      blocks: undefined,\n      blksize: undefined,\n    })\n  }\n\n  async getUncompressedFileSize() {\n    // read the last block's ISIZE (see gzip RFC),\n    // and add it to its uncompressedPosition\n    const [, uncompressedPosition] = await this.gzi.getLastBlock()\n\n    const { size } = await this.filehandle.stat()\n\n    const buf = Buffer.allocUnsafe(4)\n    // note: there should be a 28-byte EOF marker (an empty block) at\n    // the end of the file, so we skip backward past that\n    const { bytesRead } = await this.filehandle.read(buf, 0, 4, size - 28 - 4)\n    if (bytesRead !== 4) {\n      throw new Error('read error')\n    }\n    const lastBlockUncompressedSize = buf.readUInt32LE(0)\n    return uncompressedPosition + lastBlockUncompressedSize\n  }\n\n  async _readAndUncompressBlock(\n    blockBuffer: Buffer,\n    [compressedPosition]: [number],\n    [nextCompressedPosition]: [number],\n  ) {\n    let next = nextCompressedPosition\n    if (!next) {\n      next = (await this.filehandle.stat()).size\n    }\n\n    // read the compressed data into the block buffer\n    const blockCompressedLength = next - compressedPosition\n\n    await this.filehandle.read(\n      blockBuffer,\n      0,\n      blockCompressedLength,\n      compressedPosition,\n    )\n\n    // uncompress it\n    const unzippedBuffer = await unzip(\n      blockBuffer.slice(0, blockCompressedLength),\n    )\n\n    return unzippedBuffer as Buffer\n  }\n\n  async read(buf: Buffer, offset: number, length: number, position: number) {\n    // get the block positions for this read\n    const blockPositions = await this.gzi.getRelevantBlocksForRead(\n      length,\n      position,\n    )\n    const blockBuffer = Buffer.allocUnsafe(32768 * 2)\n    // uncompress the blocks and read from them one at a time to keep memory usage down\n    let destinationOffset = offset\n    let bytesRead = 0\n    for (\n      let blockNum = 0;\n      blockNum < blockPositions.length - 1;\n      blockNum += 1\n    ) {\n      // eslint-disable-next-line no-await-in-loop\n      const uncompressedBuffer = await this._readAndUncompressBlock(\n        blockBuffer,\n        blockPositions[blockNum],\n        blockPositions[blockNum + 1],\n      )\n      const [, uncompressedPosition] = blockPositions[blockNum]\n      const sourceOffset =\n        uncompressedPosition >= position ? 0 : position - uncompressedPosition\n      const sourceEnd =\n        Math.min(\n          position + length,\n          uncompressedPosition + uncompressedBuffer.length,\n        ) - uncompressedPosition\n      if (sourceOffset >= 0 && sourceOffset < uncompressedBuffer.length) {\n        uncompressedBuffer.copy(buf, destinationOffset, sourceOffset, sourceEnd)\n        destinationOffset += sourceEnd - sourceOffset\n        bytesRead += sourceEnd - sourceOffset\n      }\n    }\n\n    return { bytesRead, buffer: buf }\n  }\n}\n","import Chunk from './chunk'\nimport VirtualOffset from './virtualOffset'\n\nexport function longToNumber(long: Long) {\n  if (\n    long.greaterThan(Number.MAX_SAFE_INTEGER) ||\n    long.lessThan(Number.MIN_SAFE_INTEGER)\n  ) {\n    throw new Error('integer overflow')\n  }\n  return long.toNumber()\n}\n\nclass AbortError extends Error {\n  public code: string | undefined\n}\n/**\n * Properly check if the given AbortSignal is aborted. 
Per the standard, if the\n * signal reads as aborted, this function throws either a DOMException\n * AbortError, or a regular error with a `code` attribute set to `ERR_ABORTED`.\n *\n * For convenience, passing `undefined` is a no-op\n *\n * @param {AbortSignal} [signal] an AbortSignal, or anything with an `aborted`\n * attribute\n *\n * @returns nothing\n */\nexport function checkAbortSignal(signal?: AbortSignal) {\n  if (!signal) {\n    return\n  }\n\n  if (signal.aborted) {\n    if (typeof DOMException !== 'undefined') {\n      throw new DOMException('aborted', 'AbortError')\n    } else {\n      const e = new AbortError('aborted')\n      e.code = 'ERR_ABORTED'\n      throw e\n    }\n  }\n}\n\n/**\n * Skips to the next tick, then runs `checkAbortSignal`.\n * Await this to inside an otherwise synchronous loop to\n * provide a place to break when an abort signal is received.\n * @param {AbortSignal} signal\n */\nexport async function abortBreakPoint(signal?: AbortSignal) {\n  await Promise.resolve()\n  checkAbortSignal(signal)\n}\n\nexport function canMergeBlocks(chunk1: Chunk, chunk2: Chunk) {\n  return (\n    chunk2.minv.blockPosition - chunk1.maxv.blockPosition < 65000 &&\n    chunk2.maxv.blockPosition - chunk1.minv.blockPosition < 5000000\n  )\n}\n\nexport function optimizeChunks(chunks: Chunk[], lowest?: VirtualOffset) {\n  const mergedChunks: Chunk[] = []\n  let lastChunk: Chunk | null = null\n\n  if (chunks.length === 0) {\n    return chunks\n  }\n\n  chunks.sort(function (c0, c1) {\n    const dif = c0.minv.blockPosition - c1.minv.blockPosition\n    return dif !== 0 ? dif : c0.minv.dataPosition - c1.minv.dataPosition\n  })\n\n  chunks.forEach(chunk => {\n    if (!lowest || chunk.maxv.compareTo(lowest) > 0) {\n      if (lastChunk === null) {\n        mergedChunks.push(chunk)\n        lastChunk = chunk\n      } else {\n        if (canMergeBlocks(lastChunk, chunk)) {\n          if (chunk.maxv.compareTo(lastChunk.maxv) > 0) {\n            lastChunk.maxv = chunk.maxv\n          }\n        } else {\n          mergedChunks.push(chunk)\n          lastChunk = chunk\n        }\n      }\n    }\n  })\n\n  return mergedChunks\n}\n","import { Buffer } from 'buffer'\nexport default class VirtualOffset {\n  public blockPosition: number\n  public dataPosition: number\n  constructor(blockPosition: number, dataPosition: number) {\n    this.blockPosition = blockPosition // < offset of the compressed data block\n    this.dataPosition = dataPosition // < offset into the uncompressed data\n  }\n\n  toString() {\n    return `${this.blockPosition}:${this.dataPosition}`\n  }\n\n  compareTo(b: VirtualOffset) {\n    return (\n      this.blockPosition - b.blockPosition || this.dataPosition - b.dataPosition\n    )\n  }\n}\nexport function fromBytes(bytes: Buffer, offset = 0, bigendian = false) {\n  if (bigendian) {\n    throw new Error('big-endian virtual file offsets not implemented')\n  }\n\n  return new VirtualOffset(\n    bytes[offset + 7]! * 0x10000000000 +\n      bytes[offset + 6]! * 0x100000000 +\n      bytes[offset + 5]! * 0x1000000 +\n      bytes[offset + 4]! * 0x10000 +\n      bytes[offset + 3]! * 0x100 +\n      bytes[offset + 2]!,\n    (bytes[offset + 1]! 
<< 8) | bytes[offset]!,\n  )\n}\n","import VirtualOffset from './virtualOffset'\n\n// little class representing a chunk in the index\nexport default class Chunk {\n  public minv: VirtualOffset\n  public maxv: VirtualOffset\n  public bin: number\n  public _fetchedSize?: number\n\n  constructor(\n    minv: VirtualOffset,\n    maxv: VirtualOffset,\n    bin: number,\n    fetchedSize = undefined,\n  ) {\n    this.minv = minv\n    this.maxv = maxv\n    this.bin = bin\n    this._fetchedSize = fetchedSize\n  }\n\n  toUniqueString() {\n     \n    return `${this.minv}..${this.maxv} (bin ${\n      this.bin\n    }, fetchedSize ${this.fetchedSize()})`\n  }\n\n  toString() {\n    return this.toUniqueString()\n  }\n\n  compareTo(b: Chunk) {\n    return (\n      this.minv.compareTo(b.minv) ||\n      this.maxv.compareTo(b.maxv) ||\n      this.bin - b.bin\n    )\n  }\n\n  fetchedSize() {\n    if (this._fetchedSize !== undefined) {\n      return this._fetchedSize\n    }\n    return this.maxv.blockPosition + (1 << 16) - this.minv.blockPosition\n  }\n}\n","import { GenericFilehandle } from 'generic-filehandle'\nimport VirtualOffset from './virtualOffset'\nimport Chunk from './chunk'\n\nexport interface Options {\n  // support having some unknown parts of the options\n  [key: string]: unknown\n  signal?: AbortSignal\n}\n\nexport interface IndexData {\n  refNameToId: Record<string, number>\n  refIdToName: string[]\n  metaChar: string | null\n  columnNumbers: { ref: number; start: number; end: number }\n  coordinateType: string\n  format: string\n  [key: string]: any\n}\n\nexport default abstract class IndexFile {\n  public filehandle: GenericFilehandle\n  public renameRefSeq: (arg0: string) => string\n  private parseP?: Promise<IndexData>\n\n  constructor({\n    filehandle,\n    renameRefSeqs = (n: string) => n,\n  }: {\n    filehandle: GenericFilehandle\n    renameRefSeqs?: (a: string) => string\n  }) {\n    this.filehandle = filehandle\n    this.renameRefSeq = renameRefSeqs\n  }\n\n  public abstract lineCount(refName: string, args: Options): Promise<number>\n\n  protected abstract _parse(opts: Options): Promise<IndexData>\n\n  public async getMetadata(opts: Options = {}) {\n    const { indices: _indices, ...rest } = await this.parse(opts)\n    return rest\n  }\n\n  public abstract blocksForRange(\n    refName: string,\n    start: number,\n    end: number,\n    opts: Options,\n  ): Promise<Chunk[]>\n\n  _findFirstData(\n    currentFdl: VirtualOffset | undefined,\n    virtualOffset: VirtualOffset,\n  ) {\n    if (currentFdl) {\n      return currentFdl.compareTo(virtualOffset) > 0\n        ? 
virtualOffset\n        : currentFdl\n    } else {\n      return virtualOffset\n    }\n  }\n\n  async parse(opts: Options = {}) {\n    if (!this.parseP) {\n      this.parseP = this._parse(opts).catch((e: unknown) => {\n        this.parseP = undefined\n        throw e\n      })\n    }\n    return this.parseP\n  }\n\n  async hasRefSeq(seqId: number, opts: Options = {}) {\n    const idx = await this.parse(opts)\n    return !!idx.indices[seqId]?.binIndex\n  }\n}\n","import Long from 'long'\nimport { Buffer } from 'buffer'\nimport VirtualOffset, { fromBytes } from './virtualOffset'\nimport Chunk from './chunk'\nimport { unzip } from '@gmod/bgzf-filehandle'\nimport { longToNumber, optimizeChunks, checkAbortSignal } from './util'\nimport IndexFile, { Options } from './indexFile'\n\nconst TBI_MAGIC = 21578324 // TBI\\1\nconst TAD_LIDX_SHIFT = 14\n\n/**\n * calculate the list of bins that may overlap with region [beg,end)\n * (zero-based half-open)\n */\nfunction reg2bins(beg: number, end: number) {\n  beg += 1 // < convert to 1-based closed\n  end -= 1\n  return [\n    [0, 0],\n    [1 + (beg >> 26), 1 + (end >> 26)],\n    [9 + (beg >> 23), 9 + (end >> 23)],\n    [73 + (beg >> 20), 73 + (end >> 20)],\n    [585 + (beg >> 17), 585 + (end >> 17)],\n    [4681 + (beg >> 14), 4681 + (end >> 14)],\n  ] as const\n}\n\nexport default class TabixIndex extends IndexFile {\n  async lineCount(refName: string, opts: Options = {}) {\n    const indexData = await this.parse(opts)\n    const refId = indexData.refNameToId[refName]\n    if (refId === undefined) {\n      return -1\n    }\n    const idx = indexData.indices[refId]\n    if (!idx) {\n      return -1\n    }\n    const { stats } = indexData.indices[refId]\n    if (stats) {\n      return stats.lineCount\n    }\n    return -1\n  }\n\n  // fetch and parse the index\n  async _parse(opts: Options = {}) {\n    const buf = await this.filehandle.readFile(opts)\n    const bytes = await unzip(buf)\n    checkAbortSignal(opts.signal)\n\n    // check TBI magic numbers\n    if (bytes.readUInt32LE(0) !== TBI_MAGIC /* \"TBI\\1\" */) {\n      throw new Error('Not a TBI file')\n      // TODO: do we need to support big-endian TBI files?\n    }\n\n    // number of reference sequences in the index\n    const refCount = bytes.readInt32LE(4)\n    const formatFlags = bytes.readInt32LE(8)\n    const coordinateType =\n      formatFlags & 0x10000 ? 'zero-based-half-open' : '1-based-closed'\n    const formatOpts: Record<number, string> = {\n      0: 'generic',\n      1: 'SAM',\n      2: 'VCF',\n    }\n    const format = formatOpts[formatFlags & 0xf]\n    if (!format) {\n      throw new Error(`invalid Tabix preset format flags ${formatFlags}`)\n    }\n    const columnNumbers = {\n      ref: bytes.readInt32LE(12),\n      start: bytes.readInt32LE(16),\n      end: bytes.readInt32LE(20),\n    }\n    const metaValue = bytes.readInt32LE(24)\n    const depth = 5\n    const maxBinNumber = ((1 << ((depth + 1) * 3)) - 1) / 7\n    const maxRefLength = 2 ** (14 + depth * 3)\n    const metaChar = metaValue ? 
String.fromCharCode(metaValue) : null\n    const skipLines = bytes.readInt32LE(28)\n\n    // read sequence dictionary\n    const nameSectionLength = bytes.readInt32LE(32)\n    const { refNameToId, refIdToName } = this._parseNameBytes(\n      bytes.slice(36, 36 + nameSectionLength),\n    )\n\n    // read the indexes for each reference sequence\n    let currOffset = 36 + nameSectionLength\n    let firstDataLine: VirtualOffset | undefined\n    const indices = new Array(refCount).fill(0).map(() => {\n      // the binning index\n      const binCount = bytes.readInt32LE(currOffset)\n      currOffset += 4\n      const binIndex: Record<number, Chunk[]> = {}\n      let stats\n      for (let j = 0; j < binCount; j += 1) {\n        const bin = bytes.readUInt32LE(currOffset)\n        currOffset += 4\n        if (bin > maxBinNumber + 1) {\n          throw new Error(\n            'tabix index contains too many bins, please use a CSI index',\n          )\n        } else if (bin === maxBinNumber + 1) {\n          const chunkCount = bytes.readInt32LE(currOffset)\n          currOffset += 4\n          if (chunkCount === 2) {\n            stats = this.parsePseudoBin(bytes, currOffset)\n          }\n          currOffset += 16 * chunkCount\n        } else {\n          const chunkCount = bytes.readInt32LE(currOffset)\n          currOffset += 4\n          const chunks = new Array(chunkCount)\n          for (let k = 0; k < chunkCount; k += 1) {\n            const u = fromBytes(bytes, currOffset)\n            const v = fromBytes(bytes, currOffset + 8)\n            currOffset += 16\n            firstDataLine = this._findFirstData(firstDataLine, u)\n            chunks[k] = new Chunk(u, v, bin)\n          }\n          binIndex[bin] = chunks\n        }\n      }\n\n      // the linear index\n      const linearCount = bytes.readInt32LE(currOffset)\n      currOffset += 4\n      const linearIndex = new Array(linearCount)\n      for (let k = 0; k < linearCount; k += 1) {\n        linearIndex[k] = fromBytes(bytes, currOffset)\n        currOffset += 8\n        firstDataLine = this._findFirstData(firstDataLine, linearIndex[k])\n      }\n      return { binIndex, linearIndex, stats }\n    })\n\n    return {\n      indices,\n      metaChar,\n      maxBinNumber,\n      maxRefLength,\n      skipLines,\n      firstDataLine,\n      columnNumbers,\n      coordinateType,\n      format,\n      refIdToName,\n      refNameToId,\n      maxBlockSize: 1 << 16,\n    }\n  }\n\n  parsePseudoBin(bytes: Buffer, offset: number) {\n    const lineCount = longToNumber(\n      Long.fromBytesLE(\n        bytes.slice(offset + 16, offset + 24) as unknown as number[],\n        true,\n      ),\n    )\n    return { lineCount }\n  }\n\n  _parseNameBytes(namesBytes: Buffer) {\n    let currRefId = 0\n    let currNameStart = 0\n    const refIdToName: string[] = []\n    const refNameToId: Record<string, number> = {}\n    for (let i = 0; i < namesBytes.length; i += 1) {\n      if (!namesBytes[i]) {\n        if (currNameStart < i) {\n          let refName = namesBytes.toString('utf8', currNameStart, i)\n          refName = this.renameRefSeq(refName)\n          refIdToName[currRefId] = refName\n          refNameToId[refName] = currRefId\n        }\n        currNameStart = i + 1\n        currRefId += 1\n      }\n    }\n    return { refNameToId, refIdToName }\n  }\n\n  async blocksForRange(\n    refName: string,\n    min: number,\n    max: number,\n    opts: Options = {},\n  ) {\n    if (min < 0) {\n      min = 0\n    }\n\n    const indexData = await 
this.parse(opts)\n    const refId = indexData.refNameToId[refName]\n    if (refId === undefined) {\n      return []\n    }\n    const ba = indexData.indices[refId]\n    if (!ba) {\n      return []\n    }\n\n    const minOffset = ba.linearIndex.length\n      ? ba.linearIndex[\n          min >> TAD_LIDX_SHIFT >= ba.linearIndex.length\n            ? ba.linearIndex.length - 1\n            : min >> TAD_LIDX_SHIFT\n        ]\n      : new VirtualOffset(0, 0)\n    if (!minOffset) {\n      console.warn('querying outside of possible tabix range')\n    }\n\n    // const { linearIndex, binIndex } = indexes\n\n    const overlappingBins = reg2bins(min, max) // List of bin #s that overlap min, max\n    const chunks: Chunk[] = []\n\n    // Find chunks in overlapping bins.  Leaf bins (< 4681) are not pruned\n    for (const [start, end] of overlappingBins) {\n      for (let bin = start; bin <= end; bin++) {\n        if (ba.binIndex[bin]) {\n          for (const c of ba.binIndex[bin]) {\n            chunks.push(new Chunk(c.minv, c.maxv, bin))\n          }\n        }\n      }\n    }\n\n    // Use the linear index to find minimum file position of chunks that could\n    // contain alignments in the region\n    const nintv = ba.linearIndex.length\n    let lowest = null\n    const minLin = Math.min(min >> 14, nintv - 1)\n    const maxLin = Math.min(max >> 14, nintv - 1)\n    for (let i = minLin; i <= maxLin; ++i) {\n      const vp = ba.linearIndex[i]\n      if (vp) {\n        if (!lowest || vp.compareTo(lowest) < 0) {\n          lowest = vp\n        }\n      }\n    }\n\n    return optimizeChunks(chunks, lowest)\n  }\n}\n","import Long from 'long'\nimport { Buffer } from 'buffer'\nimport { unzip } from '@gmod/bgzf-filehandle'\n\nimport VirtualOffset, { fromBytes } from './virtualOffset'\nimport Chunk from './chunk'\nimport { longToNumber, optimizeChunks } from './util'\n\nimport IndexFile, { Options } from './indexFile'\n\nconst CSI1_MAGIC = 21582659 // CSI\\1\nconst CSI2_MAGIC = 38359875 // CSI\\2\n\nfunction lshift(num: number, bits: number) {\n  return num * 2 ** bits\n}\nfunction rshift(num: number, bits: number) {\n  return Math.floor(num / 2 ** bits)\n}\n\nexport default class CSI extends IndexFile {\n  private maxBinNumber: number\n  private depth: number\n  private minShift: number\n  constructor(args: any) {\n    super(args)\n    this.maxBinNumber = 0\n    this.depth = 0\n    this.minShift = 0\n  }\n  async lineCount(refName: string, opts: Options = {}): Promise<number> {\n    const indexData = await this.parse(opts)\n    const refId = indexData.refNameToId[refName]\n    if (refId === undefined) {\n      return -1\n    }\n    const idx = indexData.indices[refId]\n    if (!idx) {\n      return -1\n    }\n    const { stats } = indexData.indices[refId]\n    if (stats) {\n      return stats.lineCount\n    }\n    return -1\n  }\n\n  indexCov() {\n    throw new Error('CSI indexes do not support indexcov')\n  }\n\n  parseAuxData(bytes: Buffer, offset: number) {\n    const formatFlags = bytes.readInt32LE(offset)\n    const coordinateType =\n      formatFlags & 0x10000 ? 
'zero-based-half-open' : '1-based-closed'\n    const format = { 0: 'generic', 1: 'SAM', 2: 'VCF' }[formatFlags & 0xf]\n    if (!format) {\n      throw new Error(`invalid Tabix preset format flags ${formatFlags}`)\n    }\n    const columnNumbers = {\n      ref: bytes.readInt32LE(offset + 4),\n      start: bytes.readInt32LE(offset + 8),\n      end: bytes.readInt32LE(offset + 12),\n    }\n    const metaValue = bytes.readInt32LE(offset + 16)\n    const metaChar = metaValue ? String.fromCharCode(metaValue) : null\n    const skipLines = bytes.readInt32LE(offset + 20)\n    const nameSectionLength = bytes.readInt32LE(offset + 24)\n\n    const { refIdToName, refNameToId } = this._parseNameBytes(\n      bytes.slice(offset + 28, offset + 28 + nameSectionLength),\n    )\n\n    return {\n      refIdToName,\n      refNameToId,\n      skipLines,\n      metaChar,\n      columnNumbers,\n      format,\n      coordinateType,\n    }\n  }\n\n  _parseNameBytes(namesBytes: Buffer) {\n    let currRefId = 0\n    let currNameStart = 0\n    const refIdToName = []\n    const refNameToId: Record<string, number> = {}\n    for (let i = 0; i < namesBytes.length; i += 1) {\n      if (!namesBytes[i]) {\n        if (currNameStart < i) {\n          let refName = namesBytes.toString('utf8', currNameStart, i)\n          refName = this.renameRefSeq(refName)\n          refIdToName[currRefId] = refName\n          refNameToId[refName] = currRefId\n        }\n        currNameStart = i + 1\n        currRefId += 1\n      }\n    }\n    return { refNameToId, refIdToName }\n  }\n\n  // fetch and parse the index\n\n  async _parse(opts: Options = {}) {\n    const bytes = await unzip(await this.filehandle.readFile(opts))\n\n    // check CSI magic numbers\n    let csiVersion\n    if (bytes.readUInt32LE(0) === CSI1_MAGIC) {\n      csiVersion = 1\n    } else if (bytes.readUInt32LE(0) === CSI2_MAGIC) {\n      csiVersion = 2\n    } else {\n      throw new Error('Not a CSI file')\n      // TODO: do we need to support big-endian CSI files?\n    }\n\n    this.minShift = bytes.readInt32LE(4)\n    this.depth = bytes.readInt32LE(8)\n    this.maxBinNumber = ((1 << ((this.depth + 1) * 3)) - 1) / 7\n    const maxRefLength = 2 ** (this.minShift + this.depth * 3)\n    const auxLength = bytes.readInt32LE(12)\n    const aux =\n      auxLength && auxLength >= 30\n        ? 
this.parseAuxData(bytes, 16)\n        : {\n            refIdToName: [],\n            refNameToId: {},\n            metaChar: null,\n            columnNumbers: { ref: 0, start: 1, end: 2 },\n            coordinateType: 'zero-based-half-open',\n            format: 'generic',\n          }\n    const refCount = bytes.readInt32LE(16 + auxLength)\n\n    // read the indexes for each reference sequence\n    let firstDataLine: VirtualOffset | undefined\n    let currOffset = 16 + auxLength + 4\n    const indices = new Array(refCount).fill(0).map(() => {\n      // the binning index\n      const binCount = bytes.readInt32LE(currOffset)\n      currOffset += 4\n      const binIndex: Record<string, Chunk[]> = {}\n      let stats // < provided by parsing a pseudo-bin, if present\n      for (let j = 0; j < binCount; j += 1) {\n        const bin = bytes.readUInt32LE(currOffset)\n        if (bin > this.maxBinNumber) {\n          // this is a fake bin that actually has stats information\n          // about the reference sequence in it\n          stats = this.parsePseudoBin(bytes, currOffset + 4)\n          currOffset += 4 + 8 + 4 + 16 + 16\n        } else {\n          const loffset = fromBytes(bytes, currOffset + 4)\n          firstDataLine = this._findFirstData(firstDataLine, loffset)\n          const chunkCount = bytes.readInt32LE(currOffset + 12)\n          currOffset += 16\n          const chunks = new Array(chunkCount)\n          for (let k = 0; k < chunkCount; k += 1) {\n            const u = fromBytes(bytes, currOffset)\n            const v = fromBytes(bytes, currOffset + 8)\n            currOffset += 16\n            // this._findFirstData(data, u)\n            chunks[k] = new Chunk(u, v, bin)\n          }\n          binIndex[bin] = chunks\n        }\n      }\n\n      return { binIndex, stats }\n    })\n\n    return {\n      ...aux,\n      csi: true,\n      refCount,\n      maxBlockSize: 1 << 16,\n      firstDataLine,\n      csiVersion,\n      indices,\n      depth: this.depth,\n      maxBinNumber: this.maxBinNumber,\n      maxRefLength,\n    }\n  }\n\n  parsePseudoBin(bytes: Buffer, offset: number) {\n    const lineCount = longToNumber(\n      Long.fromBytesLE(\n        bytes.slice(offset + 28, offset + 36) as unknown as number[],\n        true,\n      ),\n    )\n    return { lineCount }\n  }\n\n  async blocksForRange(\n    refName: string,\n    min: number,\n    max: number,\n    opts: Options = {},\n  ) {\n    if (min < 0) {\n      min = 0\n    }\n\n    const indexData = await this.parse(opts)\n    const refId = indexData.refNameToId[refName]\n    if (refId === undefined) {\n      return []\n    }\n    const ba = indexData.indices[refId]\n    if (!ba) {\n      return []\n    }\n\n    // const { linearIndex, binIndex } = indexes\n\n    const overlappingBins = this.reg2bins(min, max) // List of bin #s that overlap min, max\n    const chunks: Chunk[] = []\n\n    // Find chunks in overlapping bins.  
Leaf bins (< 4681) are not pruned\n    for (const [start, end] of overlappingBins) {\n      for (let bin = start; bin <= end; bin++) {\n        if (ba.binIndex[bin]) {\n          for (const c of ba.binIndex[bin]) {\n            chunks.push(new Chunk(c.minv, c.maxv, bin))\n          }\n        }\n      }\n    }\n\n    return optimizeChunks(chunks, new VirtualOffset(0, 0))\n  }\n\n  /**\n   * calculate the list of bins that may overlap with region [beg,end) (zero-based half-open)\n   */\n  reg2bins(beg: number, end: number) {\n    beg -= 1 // < convert to 1-based closed\n    if (beg < 1) {\n      beg = 1\n    }\n    if (end > 2 ** 50) {\n      end = 2 ** 34\n    } // 17 GiB ought to be enough for anybody\n    end -= 1\n    let l = 0\n    let t = 0\n    let s = this.minShift + this.depth * 3\n    const bins = []\n    for (; l <= this.depth; s -= 3, t += lshift(1, l * 3), l += 1) {\n      const b = t + rshift(beg, s)\n      const e = t + rshift(end, s)\n      if (e - b + bins.length > this.maxBinNumber) {\n        throw new Error(\n          `query ${beg}-${end} is too large for current binning scheme (shift ${this.minShift}, depth ${this.depth}), try a smaller query or a coarser index binning scheme`,\n        )\n      }\n      bins.push([b, e] as const)\n    }\n    return bins\n  }\n}\n","import AbortablePromiseCache from '@gmod/abortable-promise-cache'\nimport LRU from 'quick-lru'\nimport { Buffer } from 'buffer'\nimport { GenericFilehandle, RemoteFile, LocalFile } from 'generic-filehandle'\nimport { unzip, unzipChunkSlice } from '@gmod/bgzf-filehandle'\nimport { checkAbortSignal } from './util'\nimport IndexFile, { Options, IndexData } from './indexFile'\n\nimport Chunk from './chunk'\nimport TBI from './tbi'\nimport CSI from './csi'\n\ntype GetLinesCallback = (line: string, fileOffset: number) => void\n\nconst decoder =\n  typeof TextDecoder !== 'undefined' ? new TextDecoder('utf8') : undefined\n\ninterface GetLinesOpts {\n  [key: string]: unknown\n  signal?: AbortSignal\n  lineCallback: GetLinesCallback\n}\n\ninterface ReadChunk {\n  buffer: Buffer\n  cpositions: number[]\n  dpositions: number[]\n}\n\nfunction timeout(time: number) {\n  return new Promise(resolve => setTimeout(resolve, time))\n}\nexport default class TabixIndexedFile {\n  private filehandle: GenericFilehandle\n  private index: IndexFile\n  private yieldTime: number\n  private renameRefSeq: (n: string) => string\n  private chunkCache: AbortablePromiseCache<Chunk, ReadChunk>\n\n  /**\n   * @param {object} args\n   *\n   * @param {string} [args.path]\n   *\n   * @param {filehandle} [args.filehandle]\n   *\n   * @param {string} [args.tbiPath]\n   *\n   * @param {filehandle} [args.tbiFilehandle]\n   *\n   * @param {string} [args.csiPath]\n   *\n   * @param {filehandle} [args.csiFilehandle]\n   *\n   * @param {number} [args.yieldTime] yield to main thread after N milliseconds\n   * if reading features is taking a long time to avoid hanging main thread\n   *\n   * @param {function} [args.renameRefSeqs] optional function with sig `string\n   * => string` to transform reference sequence names for the purpose of\n   * indexing and querying. 
note that the data that is returned is not altered,\n   * just the names of the reference sequences that are used for querying.\n   */\n  constructor({\n    path,\n    filehandle,\n    url,\n    tbiPath,\n    tbiUrl,\n    tbiFilehandle,\n    csiPath,\n    csiUrl,\n    csiFilehandle,\n    yieldTime = 500,\n    renameRefSeqs = n => n,\n    chunkCacheSize = 5 * 2 ** 20,\n  }: {\n    path?: string\n    filehandle?: GenericFilehandle\n    url?: string\n    tbiPath?: string\n    tbiUrl?: string\n    tbiFilehandle?: GenericFilehandle\n    csiPath?: string\n    csiUrl?: string\n    csiFilehandle?: GenericFilehandle\n    yieldTime?: number\n    renameRefSeqs?: (n: string) => string\n    chunkCacheSize?: number\n  }) {\n    if (filehandle) {\n      this.filehandle = filehandle\n    } else if (path) {\n      this.filehandle = new LocalFile(path)\n    } else if (url) {\n      this.filehandle = new RemoteFile(url)\n    } else {\n      throw new TypeError('must provide either filehandle, path, or url')\n    }\n\n    if (tbiFilehandle) {\n      this.index = new TBI({\n        filehandle: tbiFilehandle,\n        renameRefSeqs,\n      })\n    } else if (csiFilehandle) {\n      this.index = new CSI({\n        filehandle: csiFilehandle,\n        renameRefSeqs,\n      })\n    } else if (tbiPath) {\n      this.index = new TBI({\n        filehandle: new LocalFile(tbiPath),\n        renameRefSeqs,\n      })\n    } else if (csiPath) {\n      this.index = new CSI({\n        filehandle: new LocalFile(csiPath),\n        renameRefSeqs,\n      })\n    } else if (path) {\n      this.index = new TBI({\n        filehandle: new LocalFile(`${path}.tbi`),\n        renameRefSeqs,\n      })\n    } else if (csiUrl) {\n      this.index = new CSI({\n        filehandle: new RemoteFile(csiUrl),\n      })\n    } else if (tbiUrl) {\n      this.index = new TBI({\n        filehandle: new RemoteFile(tbiUrl),\n      })\n    } else if (url) {\n      this.index = new TBI({\n        filehandle: new RemoteFile(`${url}.tbi`),\n      })\n    } else {\n      throw new TypeError(\n        'must provide one of tbiFilehandle, tbiPath, csiFilehandle, csiPath, tbiUrl, csiUrl',\n      )\n    }\n\n    this.renameRefSeq = renameRefSeqs\n    this.yieldTime = yieldTime\n    this.chunkCache = new AbortablePromiseCache<Chunk, ReadChunk>({\n      cache: new LRU({ maxSize: Math.floor(chunkCacheSize / (1 << 16)) }),\n      fill: (args: Chunk, signal?: AbortSignal) =>\n        this.readChunk(args, { signal }),\n    })\n  }\n\n  /**\n   * @param refName name of the reference sequence\n   *\n   * @param start start of the region (in 0-based half-open coordinates)\n   *\n   * @param end end of the region (in 0-based half-open coordinates)\n   *\n   * @param opts callback called for each line in the region. can also pass an\n   * object param containing obj.lineCallback, obj.signal, etc\n   *\n   * @returns promise that is resolved when the whole read is finished,\n   * rejected on error\n   */\n  async getLines(\n    refName: string,\n    s: number | undefined,\n    e: number | undefined,\n    opts: GetLinesOpts | GetLinesCallback,\n  ) {\n    let signal: AbortSignal | undefined\n    let options: Options = {}\n    let callback: (line: string, lineOffset: number) => void\n\n    if (typeof opts === 'function') {\n      callback = opts\n    } else {\n      options = opts\n      callback = opts.lineCallback\n      signal = opts.signal\n    }\n\n    const metadata = await this.index.getMetadata(options)\n    checkAbortSignal(signal)\n    const start = s ?? 
0\n    const end = e ?? metadata.maxRefLength\n    if (!(start <= end)) {\n      throw new TypeError(\n        'invalid start and end coordinates. start must be less than or equal to end',\n      )\n    }\n    if (start === end) {\n      return\n    }\n\n    const chunks = await this.index.blocksForRange(refName, start, end, options)\n    checkAbortSignal(signal)\n\n    // now go through each chunk and parse and filter the lines out of it\n    let last = Date.now()\n    for (const c of chunks) {\n      let previousStartCoordinate: number | undefined\n      const { buffer, cpositions, dpositions } = await this.chunkCache.get(\n        c.toString(),\n        c,\n        signal,\n      )\n\n      checkAbortSignal(signal)\n      let blockStart = 0\n      let pos = 0\n      while (blockStart < buffer.length) {\n        const n = buffer.indexOf('\\n', blockStart)\n        if (n === -1) {\n          break\n        }\n        const b = buffer.slice(blockStart, n)\n        const line = decoder?.decode(b) ?? b.toString()\n\n        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n        if (dpositions) {\n          while (blockStart + c.minv.dataPosition >= dpositions[pos++]!) {}\n          pos--\n        }\n\n        // filter the line for whether it is within the requested range\n        const { startCoordinate, overlaps } = this.checkLine(\n          metadata,\n          refName,\n          start,\n          end,\n          line,\n        )\n\n        // do a small check just to make sure that the lines are really sorted\n        // by start coordinate\n        if (\n          previousStartCoordinate !== undefined &&\n          startCoordinate !== undefined &&\n          previousStartCoordinate > startCoordinate\n        ) {\n          throw new Error(\n            `Lines not sorted by start coordinate (${previousStartCoordinate} > ${startCoordinate}), this file is not usable with Tabix.`,\n          )\n        }\n        previousStartCoordinate = startCoordinate\n\n        if (overlaps) {\n          callback(\n            line.trim(),\n            // cpositions[pos] refers to the actual file offset of a bgzip block boundary\n            //\n            // we multiply by (1 << 8) in order to make sure each block has a \"unique\"\n            // address space so that data in that block could never overlap\n            //\n            // then blockStart - dpositions[pos] is an uncompressed file offset from\n            // that bgzip block boundary, and since the cpositions are multiplied by\n            // (1 << 8) these uncompressed offsets get a unique space\n            cpositions[pos]! * (1 << 8) +\n              (blockStart - dpositions[pos]!) 
+\n              c.minv.dataPosition +\n              1,\n          )\n        } else if (startCoordinate !== undefined && startCoordinate >= end) {\n          // the lines were overlapping the region, but now have stopped, so\n          // we must be at the end of the relevant data and we can stop\n          // processing data now\n          return\n        }\n\n        // yield to the main thread if we have been processing longer than the\n        // yield time limit\n        if (this.yieldTime && Date.now() - last > this.yieldTime) {\n          last = Date.now()\n          checkAbortSignal(signal)\n          await timeout(1)\n        }\n        blockStart = n + 1\n      }\n    }\n  }\n\n  async getMetadata(opts: Options = {}) {\n    return this.index.getMetadata(opts)\n  }\n\n  /**\n   * get a buffer containing the \"header\" region of the file, which are the\n   * bytes up to the first non-meta line\n   */\n  async getHeaderBuffer(opts: Options = {}) {\n    const { firstDataLine, metaChar, maxBlockSize } =\n      await this.getMetadata(opts)\n    checkAbortSignal(opts.signal)\n\n    // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n    const maxFetch = (firstDataLine?.blockPosition || 0) + maxBlockSize\n    // TODO: what if we don't have a firstDataLine, and the header\n    // actually takes up more than one block? this case is not covered here\n\n    const buf = await this._readRegion(0, maxFetch, opts)\n    const bytes = await unzip(buf)\n\n    // keep only the initial meta lines\n    if (metaChar) {\n      // scan forward to find where the meta lines end\n      let lastNewline = -1\n      const newlineByte = '\\n'.charCodeAt(0)\n      const metaByte = metaChar.charCodeAt(0)\n      for (let i = 0; i < bytes.length; i += 1) {\n        if (i === lastNewline + 1 && bytes[i] !== metaByte) {\n          break\n        }\n        if (bytes[i] === newlineByte) {\n          lastNewline = i\n        }\n      }\n      return bytes.slice(0, lastNewline + 1)\n    }\n    return bytes\n  }\n\n  /**\n   * get a string containing the \"header\" region of the file, which is the\n   * portion up to the first non-meta line\n   *\n   * @returns {Promise} for a string\n   */\n  async getHeader(opts: Options = {}) {\n    const bytes = await this.getHeaderBuffer(opts)\n    return bytes.toString('utf8')\n  }\n\n  /**\n   * get an array of reference sequence names, in the order in which they occur\n   * in the file. reference sequence renaming is not applied to these names.\n   */\n  async getReferenceSequenceNames(opts: Options = {}) {\n    const metadata = await this.getMetadata(opts)\n    return metadata.refIdToName\n  }\n\n  /**\n   * @param {object} metadata metadata object from the parsed index, containing\n   * columnNumbers, metaChar, and format\n   *\n   * @param {string} regionRefName\n   *\n   * @param {number} regionStart region start coordinate (0-based-half-open)\n   *\n   * @param {number} regionEnd region end coordinate (0-based-half-open)\n   *\n   * @param {string} line\n   *\n   * @returns {object} like `{startCoordinate, overlaps}`. 
overlaps is boolean,\n   * true if line is a data line that overlaps the given region\n   */\n  checkLine(\n    metadata: IndexData,\n    regionRefName: string,\n    regionStart: number,\n    regionEnd: number,\n    line: string,\n  ) {\n    const { columnNumbers, metaChar, coordinateType, format } = metadata\n    // skip meta lines\n    if (metaChar && line.startsWith(metaChar)) {\n      return { overlaps: false }\n    }\n\n    // check ref/start/end using column metadata from index\n    let { ref, start, end } = columnNumbers\n    if (!ref) {\n      ref = 0\n    }\n    if (!start) {\n      start = 0\n    }\n    if (!end) {\n      end = 0\n    }\n    if (format === 'VCF') {\n      end = 8\n    }\n    const maxColumn = Math.max(ref, start, end)\n\n    // this code is kind of complex, but it is fairly fast. basically, we want\n    // to avoid doing a split, because if the lines are really long that could\n    // lead to us allocating a bunch of extra memory, which is slow\n\n    let currentColumnNumber = 1 // cols are numbered starting at 1 in the index metadata\n    let currentColumnStart = 0\n    let refSeq = ''\n    let startCoordinate = -Infinity\n    for (let i = 0; i < line.length + 1; i += 1) {\n      if (line[i] === '\\t' || i === line.length) {\n        if (currentColumnNumber === ref) {\n          if (\n            this.renameRefSeq(line.slice(currentColumnStart, i)) !==\n            regionRefName\n          ) {\n            return { overlaps: false }\n          }\n        } else if (currentColumnNumber === start) {\n          startCoordinate = parseInt(line.slice(currentColumnStart, i), 10)\n          // we convert to 0-based-half-open\n          if (coordinateType === '1-based-closed') {\n            startCoordinate -= 1\n          }\n          if (startCoordinate >= regionEnd) {\n            return { startCoordinate, overlaps: false }\n          }\n          if (end === 0 || end === start) {\n            // if we have no end, we assume the feature is 1 bp long\n            if (startCoordinate + 1 <= regionStart) {\n              return { startCoordinate, overlaps: false }\n            }\n          }\n        } else if (format === 'VCF' && currentColumnNumber === 4) {\n          refSeq = line.slice(currentColumnStart, i)\n        } else if (currentColumnNumber === end) {\n          // this will never match if there is no end column\n          const endCoordinate =\n            format === 'VCF'\n              ? this._getVcfEnd(\n                  startCoordinate,\n                  refSeq,\n                  line.slice(currentColumnStart, i),\n                )\n              : parseInt(line.slice(currentColumnStart, i), 10)\n          if (endCoordinate <= regionStart) {\n            return { overlaps: false }\n          }\n        }\n        currentColumnStart = i + 1\n        currentColumnNumber += 1\n        if (currentColumnNumber > maxColumn) {\n          break\n        }\n      }\n    }\n    return { startCoordinate, overlaps: true }\n  }\n\n  _getVcfEnd(startCoordinate: number, refSeq: string, info: any) {\n    let endCoordinate = startCoordinate + refSeq.length\n    // ignore TRA features as they specify CHR2 and END as being on a different\n    // chromosome\n    //\n    // if CHR2 is on the same chromosome, still ignore it because there should\n    // be another pairwise feature at the end of this one\n    const isTRA = info.includes('SVTYPE=TRA')\n    if (info[0] !== '.' 
&& !isTRA) {\n      let prevChar = ';'\n      for (let j = 0; j < info.length; j += 1) {\n        if (prevChar === ';' && info.slice(j, j + 4) === 'END=') {\n          let valueEnd = info.indexOf(';', j)\n          if (valueEnd === -1) {\n            valueEnd = info.length\n          }\n          endCoordinate = parseInt(info.slice(j + 4, valueEnd), 10)\n          break\n        }\n        prevChar = info[j]\n      }\n    } else if (isTRA) {\n      return startCoordinate + 1\n    }\n    return endCoordinate\n  }\n\n  /**\n   * return the approximate number of data lines in the given reference\n   * sequence\n   *\n   * @param refName reference sequence name\n   *\n   * @returns number of data lines present on that reference sequence\n   */\n  async lineCount(refName: string, opts: Options = {}) {\n    return this.index.lineCount(refName, opts)\n  }\n\n  async _readRegion(pos: number, size: number, opts: Options = {}) {\n    const b = Buffer.alloc(size)\n    const { bytesRead, buffer } = await this.filehandle.read(\n      b,\n      0,\n      size,\n      pos,\n      opts,\n    )\n\n    return buffer.slice(0, bytesRead)\n  }\n\n  /**\n   * read and uncompress the data in a chunk (composed of one or more\n   * contiguous bgzip blocks) of the file\n   */\n  async readChunk(c: Chunk, opts: Options = {}) {\n    // fetch the compressed data, uncompress it carefully a block at a time,\n    // and stop when done\n\n    const data = await this._readRegion(\n      c.minv.blockPosition,\n      c.fetchedSize(),\n      opts,\n    )\n    return unzipChunkSlice(data, c)\n  }\n}\n"],"names":["NullSignal","AggregateAbortController","signals","Set","abortController","AbortController","addSignal","signal","this","aborted","Error","add","handleAborted","addEventListener","delete","size","abort","AggregateStatusReporter","callbacks","addCallback","callback","currentMessage","message","elt","AbortablePromiseCache","constructor","fill","cache","TypeError","get","set","fillCallback","isAbortException","exception","name","code","evict","key","entry","data","statusCallback","aborter","statusReporter","newEntry","promise","settled","then","catch","error","console","checkSinglePromise","checkForSingleAbort","Object","assign","result","has","AbortSignal","cacheEntry","cachedEntry","clear","keyIter","keys","deleteCount","next","done","value","async","unzip","inputData","strm","pos","i","chunks","inflator","totalSize","remainingInput","subarray","Inflate","push","Z_SYNC_FLUSH","err","msg","next_in","length","avail_in","Uint8Array","offset","from","e","match","unzipChunkSlice","chunk","minv","maxv","cpos","blockPosition","dpos","dataPosition","cpositions","dpositions","buffer","len","origCpos","GziIndex","filehandle","path","_readLongWithOverflow","buf","unsigned","long","slice","greaterThan","Number","MAX_SAFE_INTEGER","lessThan","MIN_SAFE_INTEGER","toNumber","_getIndex","index","_readIndex","allocUnsafe","read","numEntries","entries","Array","bufSize","entryNumber","compressedPosition","uncompressedPosition","getLastBlock","getRelevantBlocksForRead","position","endPosition","relevant","compare","nextEntry","nextUncompressedPosition","Infinity","lowerBound","upperBound","searchPosition","Math","floor","comparison","ceil","BgzFilehandle","gziFilehandle","gziPath","gzi","stat","compressedStat","getUncompressedFileSize","blocks","undefined","blksize","bytesRead","readUInt32LE","_readAndUncompressBlock","blockBuffer","nextCompressedPosition","blockCompressedLength","blockPositions","destinationOffset","blockNum","uncompressedBu
ffer","sourceOffset","sourceEnd","min","copy","longToNumber","AbortError","checkAbortSignal","DOMException","optimizeChunks","lowest","mergedChunks","lastChunk","sort","c0","c1","dif","forEach","chunk1","chunk2","compareTo","VirtualOffset","toString","b","fromBytes","bytes","bigendian","Chunk","bin","fetchedSize","_fetchedSize","toUniqueString","IndexFile","renameRefSeqs","n","renameRefSeq","getMetadata","opts","indices","_indices","rest","parse","_findFirstData","currentFdl","virtualOffset","parseP","_parse","hasRefSeq","seqId","binIndex","TabixIndex","lineCount","refName","indexData","refId","refNameToId","stats","readFile","refCount","readInt32LE","formatFlags","coordinateType","format","columnNumbers","ref","start","end","metaValue","metaChar","String","fromCharCode","skipLines","nameSectionLength","refIdToName","_parseNameBytes","firstDataLine","currOffset","map","binCount","j","maxBinNumber","chunkCount","parsePseudoBin","k","u","v","linearCount","linearIndex","maxRefLength","maxBlockSize","namesBytes","currRefId","currNameStart","blocksForRange","max","ba","warn","overlappingBins","beg","c","nintv","minLin","maxLin","vp","rshift","num","bits","CSI","args","super","depth","minShift","indexCov","parseAuxData","csiVersion","auxLength","aux","loffset","csi","reg2bins","l","t","s","bins","decoder","TextDecoder","timeout","time","Promise","resolve","setTimeout","TabixIndexedFile","url","tbiPath","tbiUrl","tbiFilehandle","csiPath","csiUrl","csiFilehandle","yieldTime","chunkCacheSize","chunkCache","A","maxSize","readChunk","getLines","options","lineCallback","metadata","last","Date","now","previousStartCoordinate","blockStart","indexOf","line","decode","startCoordinate","overlaps","checkLine","trim","getHeaderBuffer","maxFetch","_readRegion","lastNewline","newlineByte","charCodeAt","metaByte","getHeader","getReferenceSequenceNames","regionRefName","regionStart","regionEnd","startsWith","maxColumn","currentColumnNumber","currentColumnStart","refSeq","parseInt","_getVcfEnd","info","endCoordinate","isTRA","includes","prevChar","valueEnd","alloc"],"sourceRoot":""}
\ No newline at end of file
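
Editor's note: the single JSON line above is a webpack source map whose sourcesContent field embeds the original TypeScript of the @gmod/tabix modules (the tbi.ts and csi.ts index parsers and the TabixIndexedFile entry point). The sketches below are illustrative additions for reviewers, not part of the uploaded asset. First, a minimal Node/TypeScript sketch for pulling those embedded sources back out of the map; the path matches this changeset, and the sources/sourcesContent fields are assumed to follow the standard source map v3 layout:

import { readFileSync } from 'fs'

interface SourceMapV3 {
  version: number
  file: string
  sources?: string[]
  sourcesContent?: string[]
  names: string[]
  mappings: string
}

// parse the map emitted by the build (path taken from this changeset)
const map: SourceMapV3 = JSON.parse(
  readFileSync('x/static/js/8120.85042513.chunk.js.map', 'utf8'),
)

// each sourcesContent entry is one original module, e.g. the tbi.ts and
// csi.ts index parsers quoted in this diff
map.sourcesContent?.forEach((src, i) => {
  console.log(`--- module ${i}: ${map.sources?.[i] ?? '(unnamed)'} ---`)
  console.log(src.split('\n', 5).join('\n')) // first five lines of each module
})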
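Next, a usage sketch for the TabixIndexedFile class embedded above, matching the constructor and getLines signatures visible in the source; the data file names are hypothetical and the published @gmod/tabix package is assumed:

import { TabixIndexedFile } from '@gmod/tabix'

// hypothetical file names; per the constructor above, tbiPath defaults to
// `${path}.tbi` when omitted
const file = new TabixIndexedFile({
  path: 'my.vcf.gz',
  tbiPath: 'my.vcf.gz.tbi',
})

async function main() {
  // header region: the meta lines before the first data line
  const header = await file.getHeader()
  console.log(header)

  // stream all lines overlapping ctgA:0-50,000 (0-based half-open)
  await file.getLines('ctgA', 0, 50000, (line, fileOffset) => {
    console.log(fileOffset, line)
  })
}

main().catch(err => {
  console.error(err)
})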
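Finally, the fixed five-level binning scheme from the embedded tbi.ts, reproduced standalone with editor-added comments so the bin numbers returned for a query region can be checked by hand; bins 4681 and up are the 16 kb leaf bins that pair with the TAD_LIDX_SHIFT = 14 linear index:

// verbatim logic of reg2bins from the embedded tbi.ts; comments added
function reg2bins(beg: number, end: number) {
  beg += 1 // convert [beg, end) zero-based half-open to 1-based closed
  end -= 1
  return [
    [0, 0], // level 0: a single 512 Mb bin
    [1 + (beg >> 26), 1 + (end >> 26)], // level 1: 64 Mb bins
    [9 + (beg >> 23), 9 + (end >> 23)], // level 2: 8 Mb bins
    [73 + (beg >> 20), 73 + (end >> 20)], // level 3: 1 Mb bins
    [585 + (beg >> 17), 585 + (end >> 17)], // level 4: 128 kb bins
    [4681 + (beg >> 14), 4681 + (end >> 14)], // level 5: 16 kb leaf bins
  ] as const
}

// a query within the first 16 kb of a reference touches the first bin of
// every level: [[0,0],[1,1],[9,9],[73,73],[585,585],[4681,4681]]
console.log(reg2bins(0, 16384))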