annotate x/static/js/5260.e7b7aeb6.chunk.js.map @ 125:49f3d3878413 draft

planemo upload for repository https://github.com/galaxyproject/tools-iuc/tree/master/tools/jbrowse2 commit 5ea1f9c1eef1de76232e69aa6d34cda77d90d566
author fubar
date Sat, 05 Oct 2024 23:58:05 +0000
parents
children
rev   line source
125
1 {"version":3,"file":"static/js/5260.e7b7aeb6.chunk.js","mappings":"sKAGA,MAAMA,EAAa,MAeJ,MAAMC,EAEVC,QACAC,OACAC,WAHT,WAAAC,CACSH,EACAC,EACAC,EAAa,IAFb,KAAAF,QAAAA,EACA,KAAAC,OAAAA,EACA,KAAAC,WAAAA,CACN,CAEH,YAAME,CAAOC,EAAsBC,GACjC,IAAIC,EAAY,GAChB,MAGMC,EAHcH,EAAaI,MAAM,KAGR,GAAGC,cAC5BC,QAAYC,KAAKC,WAAWL,EAAYF,GAC9C,IAAKK,EACH,MAAO,GAGT,IAAI,IAAEG,EAAG,OAAEC,GAAWJ,EAClBK,GAAO,EAEX,MAAQA,GAAM,CACZ,IAAIC,GAAiB,EACrB,MAAMC,EAAMH,EAAOI,WAIbC,EAAQF,EACXG,MAAM,EAAGH,EAAII,YAAY,OACzBb,MAAM,MACNc,QAAOC,KAAOA,IAEXC,EAAQ,GACd,IAAK,MAAMC,KAAQN,EAAO,CACxB,MAAMO,EAAOD,EAAKjB,MAAM,KAAK,GACvBmB,EAAQD,EAAKE,WAAWrB,IACzBS,GAAkBW,IACrBX,GAAiB,GAKfU,EAAKN,MAAM,EAAGb,EAAWsB,QAAUtB,IACrCQ,GAAO,GAELY,GACFH,EAAMM,KAAKL,EAEf,CACA,MAAMM,EAAOP,EAAMQ,SAAQP,IACzB,MAAOQ,KAASC,GAAST,EAAKjB,MAAM,KACpC,OAAO0B,EAAMC,KAAIC,GAAO,CAACH,EAAMG,EAAI5B,MAAM,KAAK,KAAwB,IAKxE,GAAIF,EAAUuB,OAASE,EAAKF,OAASlB,KAAKV,aAAec,EAAM,CAC7D,MAAMsB,QAAa1B,KAAKX,OAAOsC,KAC7B,KAAOC,MAAM1C,GACb,EACAA,EACAgB,EACAR,GAIF,IAAKgC,EAAKG,UAAW,CACnBlC,EAAYA,EAAUmC,OAAOV,GAC7B,KACF,CACAjB,EAAS,KAAO2B,OAAO,CAAC3B,EAAQuB,EAAKvB,SACrCD,GAAOhB,CACT,MAIK,GAAIS,EAAUuB,OAASE,EAAKF,QAAUlB,KAAKV,YAAcc,EAAM,CAClET,EAAYA,EAAUmC,OAAOV,GAC7B,KACF,CACF,CAGA,OA3FJ,SAAmBW,GACjB,MAAMC,EAAO,IAAIC,IACjB,OAAOF,EAAEpB,QAAOuB,IACd,MAAMC,EAAQD,EAwFsB,GAvFpC,OAAOF,EAAKI,IAAID,IAAaH,EAAKK,IAAIF,EAAE,GAE5C,CAqFWG,CAAO3C,GAA0Bc,MAAM,EAAGT,KAAKV,WACxD,CAEQ,cAAMiD,CAAS7C,GAKrB,aAJmBM,KAAKZ,QAAQoD,SAAS,CACvCC,SAAU,UACP/C,KAGFG,MAAM,MACNc,QAAOC,KAAOA,IACdY,KAAIV,IACH,MAAM4B,EAAI5B,EAAKI,OA1GF,GA2GPyB,EAAS7B,EAAKL,MAAM,EAAGiC,GACvBE,EAAS9B,EAAKL,MAAMiC,GAE1B,MAAO,CAACC,EADIE,OAAOC,SAASF,EAAQ,IACP,GAEnC,CAEQ,gBAAM3C,CACZL,EACAF,GAEA,IAAIqD,EAAQ,EACR7C,EAAM,MACV,MAAM8C,QAAgBhD,KAAKuC,SAAS7C,GACpC,IAAK,MAAOuD,EAAKC,KAAUF,EACNC,EAAIxC,MAAM,EAAGb,EAAWsB,QAC1BtB,IACfmD,EAAQG,EACRhD,EAAMgD,EAAQ,OAKlB,MAAMC,EAAMjD,EAAM6C,EAClB,KAAII,EAAM,GAIV,MAAO,UADWnD,KAAKX,OAAOsC,KAAK,KAAOC,MAAMuB,GAAM,EAAGA,EAAKJ,EAAOrD,GAGnEQ,MAEJ,E,+CCnIF,SAASkD,EAA0BC,GACjC,IACE,OAAOC,mBAAmBD,EAC5B,CAAE,MAAOE,GAEP,OAAOF,CACT,CACF,CAEA,SAASG,EAAQlD,EAAagB,EAAcmC,EAAI,IAC9C,MAAMC,EAAOpD,EAAIR,cAAc6D,QAAQrC,GAEvC,OAAOhB,EAAIY,OAAS,GAChBZ,GACCsD,KAAKC,IAAI,EAAGH,EAAOD,GAAK,EAAI,MAAQ,IACnCnD,EAAIG,MAAMmD,KAAKC,IAAI,EAAGH,EAAOD,GAAIC,EAAOpC,EAAKJ,OAASuC,GAAGK,QACxDJ,EAAOpC,EAAKJ,OAASZ,EAAIY,OAAS,MAAQ,GACnD,CAEe,MAAM6C,UACXC,EAAAA,YAORzE,WAAAA,CACE0E,EACAC,EACAC,GAEAC,MAAMH,EAAQC,EAAeC,GAC7B,MAAME,GAAaC,EAAAA,EAAAA,gBAAeL,EAAQ,cACpCM,GAAcD,EAAAA,EAAAA,gBAAeL,EAAQ,eAE3C,IAAKI,EACH,MAAM,IAAIG,MAAM,uBAElB,IAAKD,EACH,MAAM,IAAIC,MAAM,wBAElBxE,KAAKyE,OAAS,IAAItF,GAChBuF,EAAAA,EAAAA,cAAaH,EAAaJ,IAC1BO,EAAAA,EAAAA,cAAaL,EAAYF,GACzB,KAEJ,CAOA,iBAAMQ,CAAYC,GAChB,MAAMC,EAAQD,EAAKE,YAAYhF,cACzBiF,EAAOF,EAAMhF,MAAM,KAEnBmF,SADgBhF,KAAKyE,OAAOjF,OAAOqF,IAGtClE,QAAO,EAAE,CAAEsE,KACVF,EAAKG,OAAMC,GACT/B,EAA0B6B,GAAMnF,cAAcsF,SAASD,OAG1D3D,KAAI,EAAEF,EAAM2D,MACX,MAAMI,EAASC,KAAKC,MAAMN,EAAKO,WAAW,IAAK,OACxCC,EAAKC,KAAYC,GAAQN,EAAO7D,KAAIoE,GACzCxC,EAA0BwC,KAGtBC,EAAgBF,EAAKG,WAAUrE,KAASA,IACxCsE,EAAaJ,EAChBnE,KAAIC,GAAOA,EAAI3B,gBACfgG,WAAUlF,GAAKA,EAAEwE,SAAS9D,EAAKxB,iBAE5BkG,EAAaL,EAAKE,GAClBI,EAAeN,EAAKI,GACpBG,GACY,IAAhBH,EAAoBvC,EAAQyC,EAAc3E,QAAQ6E,EAC9CC,EAAQ5C,EAAQwC,EAAY1E,GAE5B+E,EACHH,GAAWE,EAAMtG,gBAAkBoG,EAAQpG,cAExC,GAAGsG,MAAUF,KADbE,EAGN,OAAO,IAAIE,EAAAA,EAAW,CACpBC,UAAWd,EACXW,MAAOJ,EACPK,gBACAG,cAAenB,EAAO7D,KAAIoE,GAAUtC,mBAAmBsC,KACvDF,WACA,IAGN,MAA2B,UAApBd,EAAK6B,WACRzB,EAAUrE,QACRwE,GAAKA,EAAEuB,WAAW5G,gBAAkB8E,EAAKE,YAAYhF,gBAEvDkF,CACN,CAEA2B,aAAAA,GAAiB,E","sources":["../../../node_modules/@gmod/trix/src/index.t
s","../../../plugins/trix/src/TrixTextSearchAdapter/TrixTextSearchAdapter.ts"],"sourcesContent":["import { Buffer } from 'buffer'\nimport type { GenericFilehandle } from 'generic-filehandle'\n\nconst CHUNK_SIZE = 65536\n\n// this is the number of hex characters to use for the address in ixixx, see\n// https://github.com/GMOD/ixixx-js/blob/master/src/index.ts#L182\nconst ADDRESS_SIZE = 10\n\n// https://stackoverflow.com/a/9229821/2129219\nfunction uniqBy<T>(a: T[], key: (elt: T) => string) {\n const seen = new Set()\n return a.filter(item => {\n const k = key(item)\n return seen.has(k) ? false : seen.add(k)\n })\n}\n\nexport default class Trix {\n constructor(\n public ixxFile: GenericFilehandle,\n public ixFile: GenericFilehandle,\n public maxResults = 20,\n ) {}\n\n async search(searchString: string, opts?: { signal?: AbortSignal }) {\n let resultArr = [] as [string, string][]\n const searchWords = searchString.split(' ')\n\n // we only search one word at a time\n const searchWord = searchWords[0].toLowerCase()\n const res = await this._getBuffer(searchWord, opts)\n if (!res) {\n return []\n }\n\n let { end, buffer } = res\n let done = false\n // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition\n while (!done) {\n let foundSomething = false\n const str = buffer.toString()\n\n // slice to lastIndexOf('\\n') to make sure we get complete records\n // since the buffer fetch could get halfway into a record\n const lines = str\n .slice(0, str.lastIndexOf('\\n'))\n .split('\\n')\n .filter(f => !!f)\n\n const hits2 = [] as string[]\n for (const line of lines) {\n const word = line.split(' ')[0]\n const match = word.startsWith(searchWord)\n if (!foundSomething && match) {\n foundSomething = true\n }\n\n // we are done scanning if we are lexicographically greater than the\n // search string\n if (word.slice(0, searchWord.length) > searchWord) {\n done = true\n }\n if (match) {\n hits2.push(line)\n }\n }\n const hits = hits2.flatMap(line => {\n const [term, ...parts] = line.split(' ')\n return parts.map(elt => [term, elt.split(',')[0]] as [string, string])\n })\n\n // if we are not done, and we haven't filled up maxResults with hits yet,\n // then refetch\n if (resultArr.length + hits.length < this.maxResults && !done) {\n const res2 = await this.ixFile.read(\n Buffer.alloc(CHUNK_SIZE),\n 0,\n CHUNK_SIZE,\n end,\n opts,\n )\n\n // early break if empty response\n if (!res2.bytesRead) {\n resultArr = resultArr.concat(hits)\n break\n }\n buffer = Buffer.concat([buffer, res2.buffer])\n end += CHUNK_SIZE\n }\n\n // if we have filled up the hits, or we are detected to be done via the\n // filtering, then return\n else if (resultArr.length + hits.length >= this.maxResults || done) {\n resultArr = resultArr.concat(hits)\n break\n }\n }\n\n // deduplicate results based on the detail column (resultArr[1])\n return uniqBy(resultArr, elt => elt[1]).slice(0, this.maxResults)\n }\n\n private async getIndex(opts?: { signal?: AbortSignal }) {\n const file = await this.ixxFile.readFile({\n encoding: 'utf8',\n ...opts,\n })\n return file\n .split('\\n')\n .filter(f => !!f)\n .map(line => {\n const p = line.length - ADDRESS_SIZE\n const prefix = line.slice(0, p)\n const posStr = line.slice(p)\n const pos = Number.parseInt(posStr, 16)\n return [prefix, pos] as const\n })\n }\n\n private async _getBuffer(\n searchWord: string,\n opts?: { signal?: AbortSignal },\n ) {\n let start = 0\n let end = 65536\n const indexes = await this.getIndex(opts)\n for (const [key, value] of indexes) {\n const 
trimmedKey = key.slice(0, searchWord.length)\n if (trimmedKey < searchWord) {\n start = value\n end = value + 65536\n }\n }\n\n // Return the buffer and its end position in the file.\n const len = end - start\n if (len < 0) {\n return undefined\n }\n const res = await this.ixFile.read(Buffer.alloc(len), 0, len, start, opts)\n return {\n ...res,\n end,\n }\n }\n}\n","import Trix from '@gmod/trix'\nimport {\n BaseTextSearchAdapter,\n BaseAdapter,\n BaseTextSearchArgs,\n} from '@jbrowse/core/data_adapters/BaseAdapter'\nimport { openLocation } from '@jbrowse/core/util/io'\nimport BaseResult from '@jbrowse/core/TextSearch/BaseResults'\nimport {\n AnyConfigurationModel,\n readConfObject,\n} from '@jbrowse/core/configuration'\nimport PluginManager from '@jbrowse/core/PluginManager'\nimport { getSubAdapterType } from '@jbrowse/core/data_adapters/dataAdapterCache'\n\nfunction decodeURIComponentNoThrow(uri: string) {\n try {\n return decodeURIComponent(uri)\n } catch (e) {\n // avoid throwing exception on a failure to decode URI component\n return uri\n }\n}\n\nfunction shorten(str: string, term: string, w = 15) {\n const tidx = str.toLowerCase().indexOf(term)\n\n return str.length < 40\n ? str\n : (Math.max(0, tidx - w) > 0 ? '...' : '') +\n str.slice(Math.max(0, tidx - w), tidx + term.length + w).trim() +\n (tidx + term.length < str.length ? '...' : '')\n}\n\nexport default class TrixTextSearchAdapter\n extends BaseAdapter\n implements BaseTextSearchAdapter\n{\n indexingAttributes?: string[]\n trixJs: Trix\n tracksNames?: string[]\n\n constructor(\n config: AnyConfigurationModel,\n getSubAdapter?: getSubAdapterType,\n pluginManager?: PluginManager,\n ) {\n super(config, getSubAdapter, pluginManager)\n const ixFilePath = readConfObject(config, 'ixFilePath')\n const ixxFilePath = readConfObject(config, 'ixxFilePath')\n\n if (!ixFilePath) {\n throw new Error('must provide out.ix')\n }\n if (!ixxFilePath) {\n throw new Error('must provide out.ixx')\n }\n this.trixJs = new Trix(\n openLocation(ixxFilePath, pluginManager),\n openLocation(ixFilePath, pluginManager),\n 1500,\n )\n }\n\n /**\n * Returns list of results\n * @param args - search options/arguments include: search query\n * limit of results to return, searchType...prefix | full | exact\", etc.\n */\n async searchIndex(args: BaseTextSearchArgs) {\n const query = args.queryString.toLowerCase()\n const strs = query.split(' ')\n const results = await this.trixJs.search(query)\n const formatted = results\n // if multi-word search try to filter out relevant items\n .filter(([, data]) =>\n strs.every(r =>\n decodeURIComponentNoThrow(data).toLowerCase().includes(r),\n ),\n )\n .map(([term, data]) => {\n const result = JSON.parse(data.replaceAll('|', ',')) as string[]\n const [loc, trackId, ...rest] = result.map(record =>\n decodeURIComponentNoThrow(record),\n )\n\n const labelFieldIdx = rest.findIndex(elt => !!elt)\n const contextIdx = rest\n .map(elt => elt.toLowerCase())\n .findIndex(f => f.includes(term.toLowerCase()))\n\n const labelField = rest[labelFieldIdx]!\n const contextField = rest[contextIdx]!\n const context =\n contextIdx !== -1 ? shorten(contextField, term) : undefined\n const label = shorten(labelField, term)\n\n const displayString =\n !context || label.toLowerCase() === context.toLowerCase()\n ? 
label\n : `${label} (${context})`\n\n return new BaseResult({\n locString: loc,\n label: labelField,\n displayString,\n matchedObject: result.map(record => decodeURIComponent(record)),\n trackId,\n })\n })\n\n return args.searchType === 'exact'\n ? formatted.filter(\n r => r.getLabel().toLowerCase() === args.queryString.toLowerCase(),\n )\n : formatted\n }\n\n freeResources() {}\n}\n"],"names":["CHUNK_SIZE","Trix","ixxFile","ixFile","maxResults","constructor","search","searchString","opts","resultArr","searchWord","split","toLowerCase","res","this","_getBuffer","end","buffer","done","foundSomething","str","toString","lines","slice","lastIndexOf","filter","f","hits2","line","word","match","startsWith","length","push","hits","flatMap","term","parts","map","elt","res2","read","alloc","bytesRead","concat","a","seen","Set","item","k","has","add","uniqBy","getIndex","readFile","encoding","p","prefix","posStr","Number","parseInt","start","indexes","key","value","len","decodeURIComponentNoThrow","uri","decodeURIComponent","e","shorten","w","tidx","indexOf","Math","max","trim","TrixTextSearchAdapter","BaseAdapter","config","getSubAdapter","pluginManager","super","ixFilePath","readConfObject","ixxFilePath","Error","trixJs","openLocation","searchIndex","args","query","queryString","strs","formatted","data","every","r","includes","result","JSON","parse","replaceAll","loc","trackId","rest","record","labelFieldIdx","findIndex","contextIdx","labelField","contextField","context","undefined","label","displayString","BaseResult","locString","matchedObject","searchType","getLabel","freeResources"],"sourceRoot":""}
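
The map above is a standard source map v3 object: its "sources" array names the two original files (node_modules/@gmod/trix/src/index.ts and plugins/trix/src/TrixTextSearchAdapter/TrixTextSearchAdapter.ts), "sourcesContent" embeds their full TypeScript text, and "mappings" holds the base64 VLQ data tying them to the minified chunk. For readers who want the readable sources rather than the minified line, the following is a minimal sketch, not part of the repository, assuming Node.js and that the map file has been saved locally next to the script.

// Minimal sketch (not part of the repository): print the TypeScript sources
// embedded in a source map v3 file such as the one above.
// Assumes the map has been downloaded to './5260.e7b7aeb6.chunk.js.map'.
import { readFileSync } from 'fs'

interface SourceMapV3 {
  version: number
  sources: string[]
  sourcesContent?: string[]
}

const map = JSON.parse(
  readFileSync('./5260.e7b7aeb6.chunk.js.map', 'utf8'),
) as SourceMapV3

// Each entry in `sources` pairs by index with the original file text in
// `sourcesContent` (here: the @gmod/trix reader and the JBrowse 2
// TrixTextSearchAdapter).
map.sources.forEach((src, i) => {
  console.log(`// ---- ${src} ----`)
  console.log(map.sourcesContent?.[i] ?? '// (no embedded content)')
})

Run with a TypeScript runner such as ts-node; the recovered sources then show the Trix search logic directly, e.g. the chunked reads of the .ix file and the 10-hex-character offsets parsed from each .ixx index line.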